[Binary artifact dump: tar archive of Zuul CI job output. The gzip-compressed payload is not recoverable as text; only the archive member names survive.]
var/home/core/zuul-output/
var/home/core/zuul-output/logs/
var/home/core/zuul-output/logs/kubelet.log.gz
Y5 *$#B`|e҄nBzrZtH)c}]0soIz:Vbt˭Qp#8Q:.)֫vb4#FǤc:ՁgeRJ6Y-JmģгddQhecGcJbG~е/&V> ӁJ*#ї^ 5JkƩ$enYHT)jJZS臥D7:='}T1f14>e`jV`ou,87B|we4|aަveT|.$"'KC)LU&αGGBGd^QQq jqEZ^THRW/*HXq07wa}s9rh-eh~;?df-Jd mdJox6p\߾#+$NKuG$b`BTlbylD<"[j>nv~U#tr"YbpJQ/'N-q6+/m") s~&/b{=P=`\i)dIk\,9+Q+EbPBA;̳y_nu&-R_*:Ygt@ݸl< ;5+ @Y t&X5@grjFjn3@Z ̘٫ .kgkݹb"[ŒWy',#Ua:UN9%'8ܫ&ܗr 2 7w?pVI.]hJ9WD(a"@5㕷vlu*\1Gֆ@'N-^KI؉_$rK 춸STdTdTdT#uuW̰˲F5ij@Ơ$WPh j@jQ̎5 Z%c4j+$QuuUp D٨+'0j9aj3r<ڌZ6rN+ݪm_=#B u 1*LSRk L%5z类2J5F]erucRkF1awT[A3oƓ[fyQ}G2Ԝqy8U(goo=oIs|}Cֿ\&*_ of}`6}GrBq*X\J#/ѧn٬J2U3S3m^Po?}B_xо*IUHVFeo%U$X9\7'SҰ%|Li&2h;awuzJ3#iU&X6G]ernq*7;k RW )Y;ȅ2A:b%1IV܈+8T[)۰m4촬8nJ]&LV9zY8N\fgͳV_g>::NK1}^3%7Y?M\RǥP% 慨6߅t;w^u}zV&WeY 3Un_MN*_n-)u(umyi_?|l)>K*~yWtz-ܚ EaZs|O#P+u=BST:Bw+1Qu҇D Q|ڹy%W0ux}{qٮbl\0U=0"ALjM<(ÁɄV@2 ,ZHA"=&bVsݴ`,:댹K϶;^U[UT=FK[4tK4VwЍ}i7O@mqV5~ް'B~Zeyv~$RBk ('_$ǩ eچѷWhvPT~t5izƷLK )Ĕ1PO@/K-c v`Y(Xq2ZX"L.F#<ܲpP4gJF#LM;4 H,oʭ#!<`NO@ܦ89 nq 塅u=um Duķɵ'ќ禸e񵾕/-ѠȘ 8G\Gv%rb&k"s +H%VnN4X Q~8#z-#Y/vޟob`[5wV wD.3\R5IT|Sэ񇳼3>ypez{*K~G'd ti W V#&R3,QCJ e(:]luw2Iu\ ލ:ʇq*#y` ϺJ#~JOhg09xJ<4wdmnr7WȎ}Ż?sϟ?t _/w0"a| ؉9&ڜCxC547mC V笫_g\QW@|dW[$R8˛~x||p}ӋO>tYMteu0 b~}WoW(}3סT!1v@`վp9-yQiMlvfNw83}|1Xu0}?k+"<!u^{1/۩xT3픒 O*HJr}DxYrВȘS#S\c!yX4[BItpHD1%>`H0UZCBbVQfz PBfG]i6'6F"gʄMD .GQUS9| Vu2ޤs  [(ފ T 9š21V2{L%ж| ,_`muNmH>ZHe_auĊr'AD ι+YRlB@)Ka |_y?.eom͈mVՄDH\kJ=!PKwQyt$>it :8gߵQ9Ew_8Wn{4).vz YCUwx_QXK'qK-߽zw[S˴/۲}6۲ǖT6֛M46kηC[:m1qBU~!y˾?z/^J )eҁ:㽲" J "@o .4q>Z88-/yssA{ q l=AG 11g~Yjb4:ACc5L© q&`(,74#**ei3_(΄g=|0#g61IX `s&˩&T4j{m@%be&PlZXh.Phd` `A牖4$O56ά3@wy>^Tǣv!#Հ ˰u@Ux.jj*ngغN>9l="Ǎ[>^ν,uiK>/-ǐ5fk*i7{l.$8HKU60)yH9&-)@$z ٶ$!fIC(2rC"K Q)Cr%pQi)fkxrV9kлxQ򱔵*TRFI rC 23Bp:D &#PIb+uυ+E>0ꁂs( AX5 s+R\ EHJP6ǽcbIo]|̘Ł"@l4${ƉJF%G`L8jQGU6x&VCyR ,_2bpJ$K\!lSVR;0Q}շ#媊&ѱ_m,oi L$*WB}5m5:^m[O*&% -@ Lq>"SDxB*S8Lr̩0gU!c5f[=SjBз0:z.|&!L.!B#w&@()Q hm9Tjfx9ZmH)8֗' 2bTm>x BA=⊧/e'{36*l% U㔰,ּK\cI80-[Go3N;ϬIDA IYe)ĵ6u";'u i)ZҖHD}-tL5,Z9G'^3oQbZbyb}ոWro,r#BCAn$fCwPل\ Df2q*QquP1tP1m_=hYplg۽ϫON5xmGgLۨP2g+bWQb|W&8Z.8jI 6T*( bg [!!JK5 juhV Q8vx8r޶^ƲZ,wCaoW{*Ԟ[-)&{ȑa>%v=XI.l(#K${]~nɖz.ٲG]b_,zI Z0͘{oL0W,l40֭ goCsKĠMH8bf6*V;;5^; t2uYVW^~}P7ߧhc`>B0,[++t2i xJNo("ƥ(d0@TMw} * 024K2 ,MN0Y\ )G)% 1:,0#MI@#ˠ!rcJ2Mg* knH)MFpTnp>.NW)LLh*uFh.EBXFb>u.})cȻI\%zk <J:&АH,#Y%<1lr9$M\SA$,Le $ 9c Ai#YѠr@9Aآ)dlW$>hB]:2K鐘SmvsG6G'RX(Rt07Σڪ@fv9jtH: Yv1A"l,[[`IDGϒ&ED^HǦ)L.Ҵ^B>pයuV n ZWKn(!(u2g"S 7mjJ'kP`U2>)\lF-Q)RȔƓw'@xAcZiͼch|ʜ{wNZy`EqdzdQ3e |r筯g\WlkfC2? lgoqKuڂ!$[=P*?*غ8XG 7(ɘD@u;|l* @3jI"MI4 Yl(h'To$,q< ix 1*}0L|zDHQlYɉ v%8 .@ѓPZEi5$bUwai8s;VlPb7=m߼#5ÊWhe_K7)\J7 p%ɫŹYyZ.)g\1gHӔ+p256$Fŝ쩭 meуs;-}U9c$cPX3mϵ 6VZUԈxomtF]ɳdR?FCa.mN?8l,֧n]Z:xmy o9;d_] g Q Ț Y2 NEbC / kZ+F;g<[T '-b%dpUc}#}gAI//bF|!ⓓBn7;hf25fqxQ`*\g$'S'@I2(r,ʒBjOLb=kG^T%).1MpS`(H4KG?~\H/ANNmaT2Ú g5|wR~P`ù? ki8~'AבPuׂʣΆ',s}Z|IOXרa/I֥|WTNo㪑~ѹPTw/~ZXfyZhׯ+!e5uI8Ҥ]G\* ~ӂnc[HҸM7ݤi:\Ѻ{h{C0囋iSokHmjBݤ(w`=މm:ֽut5{4u$|j]M"y}k(*.(Vzl?yʋIHva6p;nikfkȋzy+ՏV4[s#w.(b5DRX\yC bkn> ϻ$r˜\~wn*lW[=R\{U*,]4.Ř|<ճ>O7Ľ|{fa7\7p#67nNƳ-4oߓ.`=nk&h1jx_l!FcnʻXY'&v֒(qёpevqÖăen|%J[ hwelǛ]Vs:bP$E.1J0t\rܨh0 \6*K4o/ݳRCxPp<r1j´"_,qoT<0Fv%|&%d4hV["!44??{"UVYslvRNqY۬A'hilZE$pވ-s p/6̒CߟcolџFVKܙd1Ă6`›0 2 q KnL%+\5)2-ǔ.} {@l1#eeP:T^/te˶;;uw&Χm_5v1g yv}3.g˷&TpD(}bzVcKev *;ټ;[Z \k\1JJAC.6Menwzv|X;w*M{K+#7t2@oyuBw8vuc[GN'}Xݻc/zzwz׭|.bO}C_z|s{]\F7jE9B . PsL㴂F&g$O(Ap8.xﲣ֊#(%EКrƲh.h^e_#=ů9_|MA%=JjXgD(fSp\$w irZfP0ԥkfCRJ][LNxR#EfCk]Q-Ӝގ;r59y;Æf@UQy{L9 *1<| &X6I9LF'0M1 P59=1h1K.zR(Ll\6gp&E͵t+p@62Vcg;2ֳUaa5x,X{,{U% oiȁ:pȹP\ Y^3bK+34gEB& ^C41Y&sHc+#v5v#㊋y(]M:ڶ2j{ vqL4}2mlĤ 3n&+=Ȏ uHydr /:YĒ#AHE+UddَS(x(XM>XG76)rHN&Gc<9$+!&2рʖ"{dZΘ6@$cֆKg|BIs# %KrCjX

aӻ_6Jsܔg?Lȕz+NObX0w>~dO}`Lq52vӜL3I!} խ vAg}_$@=>(f$CZ[Hkibcg7We7Vvee hjav~h~ֈt/[@E-:ϧYo~~Е^O]yA&[ +eG|ztrA<,LGzIԯ ^ \v-"nC¥=\}pXî^=q &qV! ٫NBuZu̴y`V!Dh*6Hubp,Y捳dxy*]: a;=iuFRE;\6kmKhd rLF-k~_~kq[g]7j࿗ݼS+chQw hähtf) 9Zq-Y1*B ~7ӯđJ*!|:DsA|zIf_ ɧSZ۝Sf".v_0~5[iz5S_W =7 Nt> =t}iPRLBqB2c6^p+!0O`"i%Np[--DْA"dU~ŪRm6A;ܧsc`)ł9 ˭.bXʅ]#-Ys;-c1k+k: N[>jTC] kCoeӨ<ÿquJ#պcsJ`׶ =8R{ɱV.Q -9+R 3hcm wm1ͅsc̱b6bmڌu|\A!k0;jSn߈ J)jF 8-},RH#ɢa{DqV|چh xXw5,Ҽ ?YyZŌ{{腫Ȧ5+fvQ~Ȕ^%W \ߜ[R֑™n26`:QFsOtz{ 7wha9lLW(%WZ4]ԍnk_tgEz>Mɢ4"%)Ѭ9(JHriQU4=Ke_t^Jv' )T Iͮ{kcsȾ<,!mF*!'c{l|Ò^p.20(U p.8ܜcI{PX0rAJF~7?>{ %ltPY6uho|+|@v7B *48^B4'7mȤoǂ F : =_ͧqu[ڌu1cU3|5g\" zSRwOo1)]?v$qVu3F]%ru}RWZIN]]%*kN]!u% ݩg\|^Xyn zk~+?O z_qzכfVsS}H3J_~{y %_|LRNޛoK`ekDmKۋei݅e6^-1‡y@!"1Rbk غVV#FkQNQ@5H9{͉\v6eJk~Sr$>#uct>D.>wY,Q SWoE]):'ulU"WuP1[TWZ躼Pf2Ml K/RΛUuV0֪-I,d]t!dTa\0CCE ͱ].l2 suce`k#,JEрcb 9MI ]I@5`W 5,u.8r*j3ARO:Wغ~:j6LZb<03^ +]Oo 8Ӯ.wïx# )+Lġpu3ОzYu'F#E[#" 1X'L`H gyK#ę^xeX&,e7c!PKnӢ u0@ ]y [ Uګ>T0 E5ȑLm()bLDשV(b0(o9$8MRQjEw܁kк.k%!RHD#])9:ko)mx1vgF]zRLao.mDʍ2WCKO25b ,Ͻ&u1$\ZX0x /l؉\r6}e$*y-В Ǝv|Tҵ]:zsV)V2y;KFlL/\e~{CfLryå @\3@cĜE)uVp 6 S -7إ*bC/T& {3h{P>}۰ȥﮣ?'&o*o |sߛi}{yYj،HDDA9mĀTBScj޴bh DT0S"!&"0l,,xGI4 ;qc6rvqNN+axwͅ8U2"kR9b mooTd7-ME)+2Oѓʏק yN~ C[$b4NT*2łIEԊ*%ZHk,.12Ζ)|?Sy!n+,m`\U9"+}z{l%0oJ/6eś BX G%:RQ:#L/^ΣN1Ő6_(3)jR3 fN3e1A&+!j o ìseq ج; 8x2&`ɻ91FשȤ;n*\Ex*rs|"ZN5` aZ!$.lb)hE}L䈸ȣ"zCK'0| >jwBYX 2a8$chQO2r#M 4L"3K9xoӑ WƍQp-*Z 8H [)(rJAO99'V}zxu ,7ML~uJ*߿TiSn)K)> P1唕#h[ޯaX&`Ih:6.˰I2318h j i oVS&oy=ivKȯH[ڳwѪwn9_]fԥʛ8b51YW(8௳x\ܕY㦎(ߓ4УM6RO}{طܷ@R-^ٽI{I/yJG aZLpMz \Xe+Nk;T=%SH3>K*DѠTm$)ԅAfV8^#>^d-F10r/R'DkM8㏍O3y4 (['(P%EiJ[^8^J,|տ mڭ4oY)}7G&-Uzwl86ǛNU LSEӆZfeZANxSt&jZv|2W]=!2Di#J!H &#$8݀㨉܁sb#(:=eQK"J(9(ā.^'U(oYf.Ktȅv.†{k@)]e;?`H^̙ͬgN/r,Sk~.adEf:MI);u#)# |O:MP޴dWUD|;ԑnV&*XEb*0 y N1Xۓ3vjC,I/9g9GQՐ@8jkIr>h 9]!MpLZR5VʪISYLYb1 Ht.ZR+LALa0z0U&~/>`m./N!ޮP}H$XRDUu8ٕO:tU*Ē)Eu՗jPC3Eo8&~KANRD垹eU,5O[CE (Z6F0V)M|KC9jXv窢 ֡& 1l8!1k@%vTe>Fdc#l@}bn:ڊ Jcb.h}Oȓ#=㟭}5*_b ,q &xrCQ;XURAzNWoT\]c?MֹRa&+)y[Q7Ժ,8-@Ka ӓ^`xp\PTޟrU#5E;II);Rݠr"Ji)AEKzjͨBYpkC-^{v@j42a\eӌcB'`5{32ou/.>]۲ZuD|I  A}l.k#qT"eНX)k:lZ&OS FJhK3$q˵x0bg?b˲b jӎcQ6Q`wNhYF^Ut1{󰤹xLsz 3Kޝ7j8{G`>+\QNu 1aåSnفTq~2匈gD,\l%NbR&BK "V= TGVKA[ϴ3T}:9Ó6lR](g8S7K%G"Hꌋg\cTL YDӡ&۴"SygTLHVU팋OôX<Oaw?6]λ_fV 6|cy^4s'S#C3֝:[>4:z)֬*kJ.jd]˪ 5ozZk]̽UQ͹\Yj0Fe03&&lSe83z?INwLTҳsOCƷL1_bjj|i%bwsb9N xZjTDnVxbOlT!D}晿(Ϥ3͇%wXooqSZH1O׋_9.tsM#f8m$['˙L ר69-A,P\#D25 16%T{{Gv&|Хm;73 uiw5^ޡ|iޡz\Sc2TxoK(lR-l%Ib?BGW'AO3ܱО]ralg3yA]\c^K.SoԥtAb Mj;a4 v!VJKK~#J<*m/aE,}"?\RRk和9 TLs"E'0)Wָ)Vleti;35ccS6O,-pg? Pf2?+/G%@$TOxvp-`(:h)e)5LFy="ϜrDê5-K 3{(C D3 0D  z <8pdg-{֒ ()):)d_-6z1"nUYUS#oJ̱Ω$G]0۪Yaռ5)3"uؾ3Zx$fѮat*ZCwR*.G^&,~X˷Ҳs \Kg*2P KJj~Iɔ 0'k]sTH@UhVXؓ]2fM3Ɛzd_Ub,{bH b)ˬ 0td` 1}!=V RmTFŀ2(MȠV>T ˎrdb nzA` a}/A󌃝!Q6 UH}&XtKJ܌2x!OVF %Ba[MY9 C\R֡h0ۣ*(5( 6p lh& VN ٠?X,x8`kqOP8OdgNA^R_G1A/UQ#fcHj DzϦ5!`wIRsCw`⃯[6o[lX_2myw Z` nXZ$L>Fu`y轋,}r,=* J/ҽJ ];)pycX¼.…N޵\ٿB̗dm0q&;dƛ;QOkTDilSM$DSd `lv}@/Q%ǀTA"&LĴyE>X\(Lt~XXe1GS^\'6X _)AJV6 )nEd0m8ڂΎ"*+ɉ⡫;͊ԶMpYI J ;>,HK7ձ㫓;!N‚.&jHĊ6 AX;d܆;:Q3hT&o%ྫྷbT5ʪ+p R ٺ)!A(C{P!QBG$h(S`j8 DHz[ }F*N6GdCª9Gu@ИXK4BiQi`|k%HDڢ  T5viEPQghH ?҃e8bxc"Õ,y7j\pYTfN$Jٚ5'7r:rl1.k2K%޼j@R\ ^@vٻgH-BI!. yxt(2cmcHu mZi+kBŴ+罺~v1皆LMƒ!QC\骅 ÛITr0 ]˿( Xah{y.j #d`as6C28fѣj@Xh+G5JD˄HA(@`H&P#' La5P߸QaݼI^!#rBV bʅs 8p 9v!E^gg@@2/ӈ0#A |T9FvavNQu6U˥7a`ebBt̲;.y\'?"H[dċq^.@@s-rVI&Uһ~ 4틶K.W ˽x?O2ēLmMZ1 @[b3.88eo ,$Lgxh9_BTm׋ˣ0[qA1IZm')I'C~[,?s{l"|vrWϜLx9S/9ԡw9~ *6ҷNy~ڭ/_uҖ̗nvr;nj7Ζ'izzu/ϟ{w _u~8/ `He D.G˿hz4whŘo]|2cs{@;M',iHMǗiNWQsyȦ0˭'f~l&1]LT)I]C_"0.wT\sc1H׳OkGЎfҩn^넳6;M3{LFp `響\]ef913sNdvp,,2֯i=X)OڀLުh17~+dlt@XuȥzSS\! 
$i^m~&&*GQIY`/P*XƃgV&$^&(Pr~q@[02N\[a;/ F[T&J&da}Vԓ xtKHL?y+-s"݁=k!iɓf|;Uxe v0MWKH ߖCbD:)eT!׮ H)Kl;[NE+Rgofk>=(=z1c$!u qi֛QZD8}Heh_{Tfs|sdr 7q/l~6u}ڒi^ӿ%Bky[5>g⸰ q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q\q9\r |j!Ϧq㱵abkvo4$# s|subz_G(6D)[Νa;,՜ H>q" ^#HD7{ɲ_]-b;2Zyy`u+CaAK l׻k$Z7RU#L/W}~ǟG)h?GQB6FSM8`@5˂5(hۺ=ӵZiVE[^jF|nev?mfS7)`rF㚾KVƁ{1@|oˡrULZ'UsRڒYy VHU`$ 8 Ἐ߁}a3)gǯ&(=煛r/zo7%k=@X8OQl@!xguh=-ʒ>FZVQގ~ {O˞DCvԼ{tPj}Mvpƹ#ӜU7ڛcoC>Ø%@)m.t"5&p$HD$8-g6gcͣ3}iЩ ѫ1/){u7qoԧL--I2>JB0A¬5%\{oqq5qvvͼ %/tNPYqw#őrӏׄxW]/C/ǛeM4ΈKtHP*CL͂Yd9,Q,BY2 `N`WGWzRhNtƱkm0_h(}C U6BJx/ z9]h1fyPHT@ &{fHdCy-u$XB "K .iM&9`'/m:Ę/7߾?q]jTjx4^ hAsB %E3˱ӕQʋb$?ZӼFo92x bf1ML7?LgP<#[$ݳӰ|ǜcI `:}f ۧĠWW$JF[0%&Zٖ.V]F\*#[mMu^Y!4t֯]mm9ގԧJ}S|- q۪7_k{4W'q!k֎pli0: #tIooqrq}|Y 񌓦T-KOzh.kͱfOUu)4ė4sFTA[C )}D邜,-!I]2-XYWgKrN߸pmY([ҷRam a AYҠ sRMht7ު@%z.fetLEJG4}(HT2,SΛgGXϒƠ"£AHJ-4)t gHhm/g<;Dۃ=A#PFʛ\(tYΘjBF&-u:yPJ9x]0Yb]xJ>֜QJǔTyǬ)SU sGcM d FL1`FE+=h_+PM= U|X׻>&iwŻqq<< -o8~^n,ސnaL#_M_7(~{\P+ŋAV5u+UuHljո]qfۃ0 isCkJ7Ni3`o]0$K#˅ J) |۫%nEMmefF4JFJMKRgurIٗ^-ǬhM޷֮)w֣igjN˥S>K T*>IgP&FgC-A*IXRWvB$pZFet1{OB8L8J.E3:%DHն#oVf Cml ` ^xdTX\l/ .d_ h+Xd'`EkUrYMRAsr# 9, 8@U-H=9szQsu&nnjm"~C,hSbWgq\bCl`&O` C!ۦ. BOLdV9f"ŃH8&Ե_o$ 1dBP2CiyL,( #(Q,# Dv=&n{ؒeQb"V[ZD[""q+D̂5I!Z87 ,lyAkDKSY]"Z)$J(fAx.vG &hLĤ90pj7&nCW.ΗYmh׵8ŭc-bm`#/`@GV /20Zf|*r>rK..=:=#=}=r7"/F"#n~ v>i=3? q9a6loyuwn YSnG>M+]rSo)c49try7WV߅x9_eoO͑qmdunU/7dr\&ff $ ٸ)F6NX3v0o3 I"EC(.sB}v PFi HEY.=+츍`J32&Ys!>C}`F9tMQ=p8{/\寀.YgXl~ocm6wfQac xDxGqYVl`+7DŽ hBC5Fxp;l}cozfUjuylۗ5{ }t8r/R(L@QfLY1,9A\nwȓ3"Lu.3bo}'SOuع;+p[K۫at/%23}tYcGk\)-WJl2gVap٧(Q XǛ-Ʌ/3_ALu]@aNDaxR¬K^J%5;7NF5mA~HFjh"R#q|j $8gWОQ]@{hYR&1:,sXkSUNs qY^p3C A?A885hɊ; M?^}Bz6s)𙟿_FoL;s@"˜|Pe K .*1hBѼSf"![ Q1IWqڃa,8uu`ϯK\cS۾rIvzͲe_/S],-cݏ DU pÅegp1̊(/YsoeVvhe 2Cj[.zb9$}CH=4 \CUt[N'8,̓; zϮ`Gr|]ɼFzy)©D 3*E@*;23烶^Ej|RY][s+S~9 8I=ة Ę"lpx8$%$JU"hݍ 'G(3x%)>jKS$]x5rv|<~⇙^(qkřW0{[.6$GreEw]wɺ(&9U26Ii$%bt"H 3!X%,CH/:~0Q]Q\) LW*@b ֠*(P@U Q_Sk$p/qpK 3@bv6Z[Õ~L݋@oP 1ϒDžT?Ղ}Bk.d1LE嫂#@MR|jws~) u??\fO*WWBy?]!D|*]?Ύ,q@ ٗN%vpFpc8˃mY?kRHz!$nfzr)% r%NH/UZE0d)UE+pQ,~U%7*M"&bYA-res+Nr]JU#beL|*VߦO~=[g׉1B='} Rw|2[ns(BVr>]ߝ|r0x#YߓKþړ{:m5v#mfY> F +3g1Ar6Edbmlu6Ȧ^j%1t8LFr(ǨR-ӣkz YШB}Xo0}\ ?>|??}Gw߽;[u30o";۝Xbpehko5;t-Y'|~eCn(MVm/J8Tr ~q9[V3Ů9͚|z (ӳ޸uU?\jp+bC4T*fzh#ef;df\lNÔ JTfe8 7P]𣦃o q~@.FgȍO~ϾC=uPK93^vd@6ac%+#qQޑ~.+Q{=${~BI[Y*Ds4ΒӔ0L@2iw!&grAkNxa_e(8Ž(2ŨW0)F^ m)9ΪҺʜ8b !tӸ&lܺ #Ϳux!T=S(0;Ǎ4ᣱyЂBADHQT#Prfh2+reCĮ.XX~t?{@,.N`PLA^ EmJ wnMK?]ËAɴ߆f>S+-ۜt՚vͶCZШSuig4WUϞ+1T\RȀ 2 BbK\vꅖl]/٬¥'Jd""ZmS$j%.b  C\J8.p 6 HJa$PO104 C ]hp$TWlqd\dq FͲ-n=dEUAǁRj2 y"ޙ`MLw&<\oޙ%r_N WK:v,R}*VAejx+ji*R-s<ZLPmT>@ХYW:'I/\R$p'.@^HBNp)9Cdc$ S`2&wN_P&IT6>P\șy[aLKAtH0[B6CRyk/}7R ,I0$Λ9#ZO@B¢푌`ZC~ k/:xkּzgP8qV2ëX ǓFa$J) hW^ ))RX`)YVO,(nk֍ ʣ(SDj+poLvduU!=L ӽJ|ڧ멛E&h4+4n:z$t9Vぐ`JNV&?xL"4z@V+#T/R*AY F-21`$VKQcZ{ U6y&Zmr?=]9QN5_@1'Va+,G-G@}ojeQ~Z~oJA. 
Qx9siX;(չ=Ӛj"LET"RZcLIuc1XNUDEIR-1 DsjahhΤ3%s T uJh=:q4|&_|P1Po~T c_cocdA2CLPEƵ&lG) IXCehdWfl'oޞɑV=p=/%SuĵbW,5oڴo = T~r{1FgF!NmK-(-Pt :IgϼТb^ء /jDChJe@F4`'Vtpu^4B嚍;)GP yH 5Cs%6@Ja<tF Ɋ wΑ֊#m4z i_ƈvQeULku|GYe2'SVP"5%"Ā;vK ]xQ<$vQti96%&@c=uZfI*!&$PHl y2\wJ=Cl]-)GO^AqvDG\B4.IBj.۵䙫q5.SApg$Ol[)k#Z$9S7WM0N)7n֪saO] G[ɸTb__iӯu.Dܥ c"咋*`_QbfVZ*+77?(@3J k7ԝ;aTYOOѻkr즀Ȇ^kFsdC^#C^-ɻTP2.tRg;DWb$%ƈK/C9J+elHt9rMy&fI ՆiZijs`wo38`]x$Ahomgy-;q7[1)qD6rCG3þjƼӮeD8,'Lsg7o<0m8$Pz,L: JF!qG9j%Z.iB:L+n?!Z /@ޠ7ywdJ\|(.Pe4Y (Eښ$QtNv^1LM:[7t-[*'u][ą{&6Q_1ruR7)o_4\OpOib2#*75+!3i 2e)]{a8Z)iDJA<M j,PfA J$nQ, L } {KcF)RYLl[#gL'-[ݽ24=t~25ӻ*n}uDžd|R-<K\<} nv]Mt"OSCW=7$\+:Fѵ 5g_>Te134kT Q*VDRYwٿfJˮReBh 2Ayc%ۋijn3$cBp^eGɫ=?s}P$F}ۛr`E,mXU B.2!Ynl ^0oJA̲姄 "")nNZb~mئ7S8w:jzW |REȪ$B42 + B` WF,l.ri@Ĺ\ ` ɬhI0$dT0*Y1P?3UFjF0XPvڮ2j{%fr@SR%BL:-ZP L"!ٗ: ( cCS+@ܴqqx=oef9_mx-׮4./W2IY]H&;Nq2)6 qK&feq%:}ŬCT:d (RJHr KeE*#(ddHܥXk,(= Eرlqc>F!o;pv9:@i`1~n@2%ٗkŋgW?jx~ahv:ny̍Gs/6?/hPwʾ\{ك*B߻#?\j)RS2nlە@>x9#Tm5wjoIw:t6ZYU7d Ǔh^Y&bgWϷ5y}o꾭C;˹5 )~88Œl}H ihYa!]i_!Ol痸&[se4CFumM߭Әxb;w4eVJႵF(hrrSHA#FnnPwvayMk>sQ#/Fbi:G2I&#c2,r <~aQ&ʅG=s\nϽ%s S.%3!ôx)i48[tbWҫx|I^HhˬRF7+2q l_trMޕ%7lIigV:$GggZ!tuܣ݉>U <2/QZ%H!8D#VD uҮc˴/xS~F28yeL3B– ׿Mܢ }DwFSd_Wخvw+{t0l#/7,RVZ65m˰6O=z$%K{11BJ lT[#G+si]t$JE^j=>xˣ-<:ކ];f.<SN%k (yL#J#Q$T5"'BJ 4N, lϊQ0Ew`鯰2k7&!ts \)ͬ)?z_}cvH;y|(\Eܦ[w^vl3B3d{3qQw3 fZ=NyyLTL^I z5(| !C stkW6l`ҽ>R\C:uM恼>$J3x-&s g7Ϝ7.ɿW2>tv=7)kSUiOUB抵.HLWjü{nwCs:ӳx~V "MT.6egBlMIKKL{.Fx3 哬VA&ctQ tV%w!!dJPI.1C$ 3MiWC}jvlՅuiC+]OV*qӟqnaz\v fL12e/Б 楓^CLIV2|ɾG'GA3խ\.#0V0OfeZ|rs616Vdi%]1:D5t!F2'Q(Ꝛqj?z#">K^Ȇ7&y*0l3bIO0fz'4 xuaYs'<Ĕ*'nqJΔOeKrsbgF  (EUWg/Y|}\cSᦖOڱ6"tYЛ=iWٔ^YZu?]wu~e3şL"OFܿI9m|6h0jIRdRiaE>ulXiY)bxZc|O ɞ\K j)Y0J#((`!y6ŨOtDR6Ʋ)d麍 6gRc229R'g a*l*(0VrEcV. =YJ:_7tZ) wsOE0ShAT'/pb2ȌP&HR"l =͝.DKc-67N :<&K3#{sE/U.FU6fp2Č0G6] )gS6Ld%ph-DN1uМvVM= LۢtXJbVpI,HAz7ERpYAM>U{n&P>}+?=p HL3 бQFR3ttdٺjUZ|tLo{?qquS*@ $`>d=fIcHB pU]j:uLV^4ڔ44#x,fMQV mhRMom""$[PWEώȡƧ+ߢhJs ie2 "p€K=1$}7z$cM]uTӐCT=A/feb0zSf~ʷ+ٻFr$W~y}nzTa)Y(eW7JIR֑Vvv&3xc C"[lHƝfo!@Eð8r(oEǸ>S;Sx ubҙVg12.UݑޱZ=B3Ej &&VQ^($I@1gz4ۇj$ǥw7#l _qRح1bM9}\PV[5h3NdQ]z0ޖgfM5Hn"#Jьd3k,P_+Lwv1v]ŧ?hs!50qdi0*L`"Z<`PwA#|Bt\S]EZ,9yŹOrEOo6f4=i -<vωLB?ոʨ}M]ZČ!>~<ypzt@؝].:涪tnd:/A(lv[TCju>3wyNPyJG_nF/wpQ0Y102Diiup ދtcm91J$-$nw0G"hU#hQ9R*:<Wx-,ߵV;<٧(-~EYȳT0-DA=g %lVs?[(;[=XUKU[**T +Et邲5tUBйUBɻ/$lNH{<%Sv嬘}~8;/!8#sGMdzyR ~(&=LRTi1&.?ʪ EAQ{y;pMx7WA3E 4̗xj4糁[]}nOP_&1aXPa8`TF]s,M'øG~Z+ a=eW.C*KcL&W$ rrWqG*W/o IQF)M{ȴ1KaLղԵ-{H$֢w :5ZyvY H(򙖜eN)bxЄ~CxS5\Q[V}3$emNX}Įׇ[iD<h?LZqK M_ M T"} j}QtПy*sEcqeܯn۫vj"iF渻ǮQkZY-칪$v@|t7W'\pWmAA7>Ӗ%.󙘙YdͿ>|@&xr7'Dh6S0f,YJL&0"/h^$;o`4 )4'[* ߍ}~OE繬vp[uI u/W9lMaǢ n/Ŭ5=84J;_wa3omem߼/j.߽q)Mٸ} +%`]`IRQt]I!;M1J 9OTt)w!MTp ~CM{v2hCc"{&CDd0E4ER%nbi1g;Ja8L'>˼~5tD`0]~2]o]|ϻ3 Q葉WލFG= n]6=xgf7K9at,ţ2‹,q T[U 20_GŖ%9χK bNܢ׳\ YN>/Z3i>N??rc$YަZ[GHG(L-4g`Z9m-tJ"UZa F+QSFK7נ`n,n_M]:2h >7KS-Û--d{;A pLJי0QekCFWK8PYIpNR"wZla"Z2z 3y_IlWAć O#ݯ>݀kTGP{=P¢Yp6oLTFدX 3#ƪgv5@o*nm4!#2 2?0`96ΟϼKHKt7~"ȮnЩmng@TNĬ̧eM f7Q1bm?)t<*ц>bIjw_yӎkJ{c0UJp/ߖF"UR #z!HBO`"RSFDBH0<)n:n qыָ \ܯŠtN2?T^p`[9ln ,aq {1Q ?,rmd,`xWȅG%:RQ:#L/ݸ#N@ 0Up;c`FZTs,4d%D0̺F&Fێ63|S3oԤ9XiU?%q))j۬e[K dCR$L.^d}WN(F8FQԖx-ҎnifNCVr(rJb2Lo#`ޗkuc|ᬏGq07霩`f! 
ݔnBDraKF Ǿ̰wivҥ'Mst8Th3UMRmDPicB1\SL5<9V2n>i׷E6\2 px SUZ08B1Flg&\D 0SPfOr(jfrHRIpxaR[E;;NR+4Apæjcp6U.!p;0?ҫ'$%3kfSYAF|z\DyX/ %Hjm C0(` 8 Q_؄ A*Hh5FҎY | \K0 rȳu18;ɮ )5H\ܢxģ` -I{sv٫O`I m7öGl7^0om JaYi^=on]`+=|hw>:Gá8 9Y J̬ҬQ)ڔmV* 1ԷoH\R- h 2AycEpɋijn3$cȂhd`,sq2!xrYJf.sx T^2fΚ+]-" ꣰CYIFRvKȁ,X)]dC^ isxT_rte}D5/%yDXA^9M)X\W˜;*Z9RAc.ouibru]]1-zʖ歄dmWU;__\#5sWnMޟFnhk,tq<~m97S5O/]W.oyuէ~^NtWE[ͥW-\%\an"4ޟ+t& f 3.4;݀qF9,Z |Oҿͽ;Hwh/܍2(dcS>!|^9:ŵ+Y.=+ 'J^z[kIw3U08>[CP&* kM { en}z]y؜\mW 0􋬗{JQE%Ya܅e^*AWx#@ ' \ޖL]Wخ:iKհt_Je3/]hɻ2PrP cd^#K'`&d4)H}v|'yy[9S)UvOa˶,g1_4W$QRsև6Vdi%XW 5FG諡k Ȝܧ%?R=Q~u;17Syы*-ee=w}&ZtuKo%gez.e654Fslrܔ]`y6YmOٷUWfz4;[b/>OlfҷlDEQ$ \bAIMl1g0ARr$p˹ B>3y-Q3EEr@T*N@QpM"B s,VrfΚ{wO#3KNbCC+|hɒzV.brj=mײpGwvup@tigSLJbp2bF! z<{A*|ӁPx;IQXZ-s$;m3:@JGXд3trw0mP%eœRnuVܝu<¶V'۔kA,*hYjR.h/ )@` -O4@P|P@#HheS* ffl!x;nevf)|9n۹"jj.mc*7Kjo%.+F,-FK:bQTH>=.dKKluӇ\Vg9>+uT:pGJ߆bD+ʌ!?arˏMVPɠ;݋'g?w?;珿;G\h cɩ̄.LQ5pyE+Uh*oZ4kEͧT:ꚏ[Xv]nKhOllm;z?]|n?Ųy5lWʢmCòKӏO[\V|UiԺGb3 l^Oa9JLb'JqϲD+pZ#a4 9!fRFNNC!=Qzk8Z Fs.}Yd<2} ,Y,09z/TYlW y夬7]&.W5xt0=O WY DXTQ"PCկ穼\L޵/|Uͫ.D\t_d:ϯgF׶ yH\/%5Ǚk~Ykv?*yls־eޖp;iۃ>1ta#4ueʾ_[cKKqAcuґ d fAmI=Fe^ye #d0cLvIx<SN%k (1t$'r+ږZs!(=`b g}Vd"#X{9,EFڻsyfC ƱM"Ǩ)O>wWgp"Cgs}y 񘆘,ۧ/s 寈MnKeFOsVMCzc*s*sWSe8Yl-Z1$5,.$A17, <2}T{Sz ,_g''F~alŻjhQ8.5kmaqpyyKS maeY='OⱭYMFnR^liz4ůnf#71x|4[7Ўeq׺m_8E1΍ru,[\K@9"ʐ55)^`$7 7}C%Βֆdj(#MS1*s jq)P<1Psq<4nLWk` p5DG E<]DqhKKlQ֫|;2l5{2nYͥrGb\(NZRcIJ܌tDm-djkOYJ9-h-wz7ӂv 6%,+cޫd/>cJ=BQ]q3F5rO57Bh0DAII7ʏV5[ŠΎg;ko-?T~uG bÈQPN$? f6mJad8U&p&[{Q?@6`RvZÍAkeUȀ!bԬbcU?c0_ZGjۋŸqҼS@hIY:mEsPq04:3htfZ@9] MK8"R@-&H ]^蜵yԒF-I U14f (qSLd[[0$oZi|G C=r{`hGp6XZ,Vg]9(\eS/잜 |\cgl~ou-Bmȭ-&xc:|!Viӑ'&3M6q[z$l!&!ݏ-ɘ1w3SX a`ddSFYn &~]|Xݧ&9NzJ3dY\"wW0RNRM#X3MP!xcY'+ɷަ|5;lS.}K$B?mIP¯]z^l=.U=hz=KIRF2ҐbA5 k%1A7~qɎ&%{^‘)qIvُj)9_tcjrL _[5\Y?,pÑyuyܺ;p+^!?Iiz7ź/kgO ȧDDkI.DBDKLŽt%] L 8>\uftՃvk_t5 w yVWp}CWZBJlLWf̪Cйt] Jho]WBInu8c;ҕFWOJ&1̗J(!Z Fݑnt%Mi u%+ t+t+u] mh^WL_uD]'W[/'rjzP6h.ߝykf] o'fD3ϔ>Gys'o-|6/r>1gڜà9?r;?kŦj'7񲤏=N1Irr7esO"WN>^o^tw}WjBh_) .O)A7_f /WWه\sh;9=?XC6>GrBxZHlh!'4`O ԋցo]WBdrtzt%Ϥ)L1J(a]-QW9e#]10`?\,кz3r{V;U.@_WQ= ay4Z7ωeqtV]Z@hHWl] ^t%[וPjBߓJp-+u]IP+Mu+Jp] -5+ j2lGb`+u] m0)5UW ԕ% O:sb4= ճtQӌ;{֛)Z4S㫦i@mm~Rnϕ&wJ)ai54~C>_8;ҕS?3w+%|w_(aD]`H+(b\] i^WB*G&t+]1W<ڹNu0vtE{V=y4`4\?IfЧQ4AWЪGDNI<FWk] -u%ޮZ2>⨺b`T] .`/ZJ(Yu@]威sӀ=v+ ϕiekV]}]⺧ޕ׍ׂEWBוPZj"O[ZZ^deg֜jO(ܹdr;vk7 V63\nk)aОs7vF],*l$Md,]#4h]O$#wػX#9 \+PQ8#0^t%dѕzۋ6uŔAZI?}ۼ{}{<>jOsKy]iN5y]k~|0>c+^Fe/ #VQv"L'˫eG;l3pXE sf>5oJLe>P__!?pAyɽ|Rϳ ԡ__ͩ[~7#ZwBhs+8KP0K8RdgG# Q~{O9{o_3GW _Okyo_ \ e=p 0Lc"6RVc!I.Y5`*onUAK)&jUB"S1kbQ)h)T0 Tq1g\wwl'2vCuTBcP8Xv6bUJ`M- IZP;&-kE|j-\K:̕RS%z,&G NZ>^@?|t]]4P;Pd 22X'e+`+XC6ɡ9c{sYn\#fc6FX l>9Ũr <lgnaX-; 8uHew ;&YAфCaDHib(nrާ\UY8zs)r\fgG(Q" ը\Uj`#;C@0$@]Ld IAx9ERmH,:uoԵHMI1KuXg]!(dGoOM6deGގ@f#_f F UCk\4@15>#=>1&#TϺێSQ`,C)9jD*ZuIBwp 䁀:t K'5WPPl5khG6!XX-;l,j3 IȚ\\"(Ψ 2S(ͮʀ V;8*jVXT*JBȲ$P>fl~)ئ(jS,ؚϽ0n0V =liVЀʬ$"޲[6RSң*p/GT^AcA-AmU(a@6 W|ZR0 z0dkRac=A;wd j6mi><]͖\GIT@0uvtrhѓХE8I0{{'QʹvBmCYk Q^$zhPI;)_o9*͈=Xe9nJȀ-ʡmM1WdsC<܈VM~nQD{=jP"˕,]d*2(eFAj0F-@OOdPL;PqU߸YaKh*TOۉ[W6b]M57XwHN_*@Q 2=Pʢ#Hmr3u#g=uc @ =JW6"R 1tFΩm X{uAfa=Fz`IQ{/AFL(X*-l]Ρ~\뜼Y%sP׮BT4`joZ 05^%kJ6VP8j* (FW6m8EO(Is!y~)H k|Tp(G=zMW::݂X2\b$*mkE!ⰱކ`ݨ>jEr7D|9\Y 6I;\$Ct.Xz준%!Z`4Q Aygjp N}CLkԋB˫y\onyRa*lKSQИ)ȶq"GqիϷR`St 4Z%6`6obHkq7-.,K89i4ӊw-^.lr>e:m߿܏n~Лnsyɷwz^NON/|B+-+\^bEwZW{gç_/mnnvƇж~z6gh㦭d%C}E*Jg*}P= km(< J'X @ @b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X |@`I Dx@0WQZ㞼VZJg"h@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X l@` #RQ:!E DJԕ@d2zJ /Y J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@zJ Z3I ;% VQZVjJIJ V@b%+X J V@b%+X J V@b%+X J V@b%+X J V@b%+X J 
V@b%Q}^~ ɋW ZjJ,ow ?}.v,/bpɈˏFDƣ.#.JYK 1\`gձ!{pEVjp '8""+27,*:\V2\=G pPps ɕy4Cs)-aϞN:7uPmO=T /&ҊޜcpvSAiO>_JZxhn'hhliظd5/ْ&Nz/ ۉMOd@/_\]O.Wޠzմ&@bz~Vgu@ |?[v+{%/&_fggm7pT]I!Dy_PLH'>ʹ\AI`c*s`=Y2WMC֪yYi7yvEV:W( `\:\3\iJqKǂ{cK!\ItAhsDpE{s4pEw,pk1cY)-3+IpDpE{q4pEF},pYi 3+m4+kHElb%7Mӽl_zɲ~Ջy;?zy k׶(y؎}5,F4"6-&_Q [2cdu8YrC2V&u4x:Bѝ2r'ܭ$;9T[o7mpt뽘ս(WnE-iSdPT䤽ت:6؎|~c=ܩ? `!$˺q~u镫$>Yi>W[,:m*S8L=8exmn))$mךt*ExQ㪬!@L)AiZ`6ߜͲyeAkr ݹf[v#u5`j HgwĥPf7m>zXm\7xfao7 mZ2|o&Cge2Elꁁ/|snt~2M0j):qvr۫U/r/g{n塳=gWt=')OjH=sw g[ɽ1 p3ac:X7E+9ԝκz} f̅~tbW7gkw@v|4fThcWdm5w鼹ͻ܋ZA| lb Gx3")zTo4yhّ!⭍JjZ]tu}Pup `JU)x'cӲ<{Tc4yht=IЋ%֒'yFdddϤ^#T^Dwv,{3wz\ߏ6h.M7nҍ?M=7bԣ/6 j9o[Ia._nLbl\q?2f(iOMqmӮhC~oaxey{ym'.Uڑ;,qu;qkoDcx1fEWm]ɻbI;w8cб~N\-v-m݇:}(O 3Њt>& cL 1ݻk:K׽!7ķ>BR~v?`Y^sPM޽nQ:|Pۆ'L>4:y}i}hcjbn1@߮w ٽ"]}w.!'p޸A'V9PBWߦ,z;\~֠IO?xLڳ=?ܻܺ{(o3DྲྀrrN*Un\RdY)oePJ-f $`Kdž,7Gm̢鮚8MwQ舾D=M]̋f:R>ﮜif4'fDh~0Mهy|!/顨mР8mf~Ͼiʆ`Koy!꣥2C]8]Y2{ǻ_}+ GԹ}S 96G$<ձx*>РÄ#-+x갡eBTв1D=wmti1$BR1e}T)*Ť5`bRak߭!ta](v}LJ;U'nf}\7Bw;qEn{nNƵNN# Z0͘bb1gÌ}?{ꄥ0=: hX' ’ɍB--Xmjl_kyXdnνux?#=dN*A|!hyuͲ]eNJ=+p`8-dQRzDKQ6X0qHQ-=Ġw})= DrQ%MdIFPqQ0@ޥhR8"0}]i$)ynE Ho1'MCI(4DId9"P'8;MRcZ)a 1K"SYͅ1d`ʬQP9QqZ@9AزRlWf(hPrZtH)c}6I^xK#Xd2 ̍.ruN*vcZc:ʠmtneڲ輍^ Dt1p,:j"-"" k4U%wm(Ƕ9p1h09*_Sɯ"3j_K"KrH5]w*,$FYxF/nq8rdBnES<xxx#WNMzdJK̞~%/h=*VZ32gu$rhab,=)U=bFluD幉I8o}e#f5q+|߆j3n'%:7 ̼7y~P̽2p(/cb7/$6|zqV$a4P7Y$рdA O($brJMQA^C! ni0ͳ7.IPlwƭtT/Dap9(k6WWR)+ٍe6lќ CJ@{ڛJȍ1tf8’g hӚ%jti]Sn Kb2< V> 䤗)%VN̈x Ev]8MaLt.8%р3$osf }"EeŅ/PC4qZ=MJ˲h9t[ev؈i=u;VllwXMg oQ~͊rⷐfΠ)p256$FŝlgV2KhrN֦ ҏ~ ꫶Lp KSyMD hZEHxoĜ4:HnJDt_S.+.mvz&Yz:B6}-}iy MR6d_)$=V%L'̒!Mp pD(sCyå8rJ\d K ;0Ҫ>#]c+^͈ɇNzIq=٦_-h6XJ0>+h#0 JJ_ )q,i+<$ =5^rĜ$EzQ R.191mJ94C Y_/o!h8>0rU,-gYLheLJˊUFst#R3(DМ. ڪ?T"GzZ]G P6Ejkv~԰#ėjxY]]Љf6 _ø&Tt2zm LgN>0oajSAfjx_4̳!7n9/.'s¥Կ9GpK.ȉGڥF&_Ƿ-\* m}p͙]m{E4nsn4p!뗷n-t>rg}IP;C0/nx|'w=]mh>O={;vׯf'fĚӣ||rq:ܵY龎S4ӣ]yؼ x:Mߊgݾq}ߺi彋E{y3YVis#oǷ. 8Ӄ榇Zۃ+o;1*TDs󑋱诋_n=iaryTڵb98l/*_K7#oIޅ$gQw2KNЭGC]if'6\ I94.؆[z6O{H]r|z/LGPȴSDWv7H)-{fٖ>9ܓ.'ƚcEE ƟE{k}zdb-ޮc?[L;k(pbݰseE/B԰s v}8A4ؙ7]j J?c |jˉ*Bq%rrb7sr06cgG?dޞPh9|=+ e’|iEnިyFa ڕNbѠ͊[m`zeѯWuܸcf;WCD+_c@yJ@uE^xX SJC{7Bq׎qgtƳ*DJG7h2Ba+,9CCEJ!Rz(a%ii"iL)`>5&̂$n1qaIOdeݤX&%VYXfRyH A3\]CBWf1Npo"/|zfz9 r9[S;tŢҷ&ſ> e-]w]O.m~nrh ߲"]s n+jgI rx?,muh]KnzG7c}m_;=<Ɇgs~2λӻ-tKѥ~}]y nMw-$دN6=? WX^ޡCW5A7!( ѝĢڬەTv23X iLH0.Ae><6xTd֊#(ŻEКrƲh.h^[e#aOM>w-qb2$ q@6uFRװD:B&AH0. 3Vҿ/ɉCOBkk7Pa-q/Ժ7Sizt _ٰ͓W**ow"T)gA%FϖVB MR hekb;p! 
UQ[cޓ6䂠'6ѥhsgR\Kǹ j#c5q#c=R ͌}PWB=`\nŒ{\'w4@Ϯg+~ wv6vv:qĖ}J<;ee %iN.9+2KkBk 2 CQ\Lz/()6iM&ݎY0~C,[dWTcAjc_ԶQ=0 Q4}2  MĤ 3n&+ <DŽxH˸@21CE7XzD6h"QtTɬ" T'\e<&v,.͎}PVC*woqBޏL\zRwH޴ eǕmՕw]^< GL}T#$jF?0N拦]5B|TOxnKswOڷ+m:z&6"_wOPmi]I'72ӐҡO'  Żr$.O4i=E&2iNxJ$ڔ>ڣZ|iiqn`?o¯#hҁBh ntn2uM:\]: |t]hn1C8yqSKF?vg><;Z?tr:-6tя?4Z\v G^J8rz#trEtQzzdmu'%Y$pb?94m6bh IXCS*ɹM?6?`;6O(|vIZLfDpcVO^x-<|Fٻ6dWfqFK l$:!UbL2I_O"R")J4Bb[tOwUW1@ˬN#^\e|0uTVtL U!2ܥą`Q2wY/J:%GSȿŦ4ugˤQ=WJOF~D ;9YMl8Z.m(LZ|܊ ]cE-fg9A0#Ǔ rTڐ#HB/:}PޗQIfP&o<0I& at*dOaCXZ24"9RDإhaVh^tX  i Oj'wۈ`'lV^)oh2EfSZM(H&02A]C+O4m3^֣rr֒1% :I Pl$ȲUf>`d怌ONH0 'Ciz`A4{W ϘRg,КHy:B?t{qJRcGfMqgU,IyUQ(AMuK hfevueҿxtO2꫺'|\s&6Ũ[rJt θd-%ȍF.Fc2GoIVu70KԟشKn8n<9?u8_N?}&?΅ thd~>D_/?x9v\3<FGa%kf5jW_h8+IÓe cg靦ww& {r/X0;sw==ī]R yݙoᄁ% }%P[:_׌XیEa@SP( G~r6У6S Kkuݻb[4ӑʼnPeD+w1?d/(mx&}nrg$'燷'N޾p…=yw'~zK_h)p B< ~P?}M5M͚uhMW^ ц][;0_[kc/Q(I旳z愛w *;q6|;Lq}<&hUMgUÚx"%8,RXo@8B!fWjZA7ǒQzUQ=/¢iD{D/2et01Kpq,f 6Y] idcg(;{& Twr9KM5IfT/noS:#~}>i|:LAd<Ӎ ziE9-Q>WVyZV>=]w'Iek|>ťJi/zAv^4G/ثTIic@%V)AE>uki&,oWTgؠrGo^A\A`XyʅM+@᧰e.igYO2Fxf5 $UV8E z$KI~I^2󙕾pfVI)}u.L5yȊjq$o4w_u7Y $puA5[r1L5>3)̽s |01^Nd@пG(c<71 _k:RK*|2'>7 bS6v~gL-{1ՑNg4ȵ^`| Ք0,vw0ɾx)Ȼхٝ{P0<| ǥۚъ_lҎBHBKGZOȪLH0^i[1̺JKc+P u`]ȂdIRLbw,rY~hFpXn:(":!rѰh2>I )T`%VjJq!rAKWM6ΐ5,pZK{C S&dVccݹ},td8ԅm6 y:6?"6^*-$y|sGSj2OCVmW  L&9L8i+@*'VV$coq ř̽Ǚ2B2I x.;ɵèPy dua.N`p8y>rڄ:sO^kt#8PfQ 銒Muv9]O5>.oc:~8!^W|&>A9esZg ū;«hճ!9Q]:E_4[M"&Ur䲲&檜VK'lanFzeUuzAA;qνe )Kn}L.hP +P "9+iew޹_w>j[mթ,Ɛ3A[>w(Ls}k 9.e< ;KL@D*pPZy@ST -MjKr>S7օّ S|ָ 3گZ%YFC/ OGgF~*GS#r4dA*hqj;f|{#K,3Dg2g8Bvs^'A6]rٜGsvܝ&ܒ]?ST;^Bб}_[V~P+~ԬI2+K>3V(._r(]qf-{хNJ"H XG!A$EB+'HG<7+H1HҰ7֝ەxUY(sюLkx[_W?.T3NAeS]Yju;]w"N< 1I~0sUx9sUzqU饱z %JsA]\ԛ 7u_Ƴ7W;KUuY4/90 i*WdlJ+֜gu0MM])jt_R׌}*Nǹ"Ʃ+tVLUkP`U2C/WɫL=͚?bDASZOލRi5)s yb˩G \ZCW~Z}7@Ĭ,ӶdpLWB[ϯ 6<^Jn\.:&c,RK)I13"\rWs[/dgB`aIq zOO܄)J-+)|]vm ݎI(-BWaUFwl>-Fk4-tU^1}k .fV6)##OUwD胉8E3-q3 y呍<42]1(,3\ YgZF$VfaKBmq(N\/J0o6°(%u$Ȭ*{hV0 /K R켍1߾X8sHTҷۿJh)?RgZȑ"ߏ!\ &7Q|ٚȒGo[[VKLJv7Uf*D]&GQbmk|轏I&'|\K밖q}A[M;a5r6ΉV. ^Wq+l9b(wϨvgL2gÔ(ʎ(T4 bβJ(J+H+ JgwN#(M'/O4)^F!,S-pe BY62h'i5F@^XJHOBvU砅 2Ψ1D\ HoqNS`em8錡j2 \9C `hRI&.`1^zN; Z.Q=ɒ4eG%y 'G[ׁ#>3ƆO"I7Y1Y>R_hiINq D{RpJE $㲃E_.mP ^ջԗ,& {)ia7jwR4ti3 1H9!Oqo?oU& .(f5*A)j  ^s]5C]z0bJ' 3H)hjhmdmb%i< JW"e38)Pcpr<8g> `u< ɡV:Fdi ;;Z`n.Ouu ļ_No%n 5mgϭ_M3%ϖ-gGӚ,vn ]ar#ḱ]u mbh&')c ܺjZ -*u4nۛd7 %!)hYƣzCgȑi7OʲP-ċp5=ax ޝhtW/R{ˮ1\)|;!ypL;d[dKK?m1aP/M!Iu LvBJ*!@IQ v&/Bɾ/Mv˸J)I-u^@UZJSXh)Eu)pmH xeyg6[TYbOKw.F@K[a5J)*f4)W \I*I$r ^BcsÓaFs69&%A3tBێc"^;5 Xv1WkZtZCih*H!:mЈ]P#9z5v=ꎚ$-dDTs٪Ba8 hJ&G:j;amSdx,|mah;ič@aT%́G錳\B$ZrQB,iAPNŖ=l %DiD)8Iiʱ?qy#jhI3< +MlWaG~o~\ܾR_= csh՗ɹ,P@UkE₥qϥ!)B Gi"2XQ%DK&PЄ,MNK5hbL0j(n`璈SBrTB}-QDlad]!|u9}>Gˏ~?_Y~[1 3 .EA%\ P$St^*ycq;p|*RM۠$IPw0PSI[NjvKg>j0q6%XkťҦ¥W"r=*9NU>B\ Iop^-ͻ@Iw5ݙtGPo#M>qtߍ kݠ_ՒfI$f9[Ո2f$ hMbT ੄CCT&kT =K.IźVkEpWJd=#M*FPP!PM)r.Fe!$o:^X}+ a:/vkQϴOdJP[@㌈!CufsMnq{1)w#` bo?E{?Y\v6fYFqz8&GQ K?_d̤{e~=%f8A=Nćފ1_\{4s0J('䒜i9&x')*8QJVQD1Bi׳K)Yq@N4O Ѓ0~=u?%Ԋd6J8/՚ŋӋ7/U4bT7NN9ufU/mµ< .T-9BzY^<]ksm}|vMpv#4 f.Kfl] G竬ȇgaAu55=IqW{r}Omݰnd\May1Q8yJ>=YٿO7<WF6:ȶ^۞JbI9;(>?>JNOhocHGiQ2X9_~7x&?7O)3p 4z\NX`, ny(57S|~eGnQ, ھ_C/)4wrDAWV 8/jk9WZ-矿 1!m~7y7>V56m4NZ1a7$$584 :h   o+Y;݀D PΫu82^h4Z<7:kKBC.] qP$$r Si:mؙT*%Vkn|5–.qb aЇ!quk pE:KD@wJhH9w* !U9b !twfMicG62JY+HO5,iQ:zX@"IPpXЀS.j'!(b8xAT! kr cPסE<#?] 
B] Fy q؛b@~!z?\kABwd6 |w,޲ڞ5ޘ]5~x9xv9q^Ԝ)‘ .Se=5; kEuNB KB^F\:*޺:kzmf*/nisk.՟~Y/lٝ_/j~dLI|_`~5GĮp^BöeJczt5^z9}-_Wa 0DC0e?]HKF@6-@J€O |g>TjP#Aŏ"hiC QJ%k)IJj`= =B4m.%eKB $᤭M$aQkAL+Mx*}j;7PCW4P<Ѽ#K>]=#tut`[v!P.,G6MVQYkܪ _7hC5bZgNmL32 5Q0@;DqpLV$2ID\*AZ*7VD6 2\`*=[Jԡ @<1 (+AT:yn)v̙^⬂7{Ƒ Mp2RWW_ !x:*qM(?ûDJ9Hi G8Ꞻ|]0Q9 N؅lAo Uu _.[kNALWrljՉg1BѥO~u9 W`+7$B UI*1ȑ:ΰ[wyu@eQ(*zō~].hotk{Gp ;W'C.m#"G}njm|\= @"Zl2g Zf])JAEo&x&9rv>pڿl׍m< #Fl܅!C5 'e*K5J8']1t9B[t%}݁~gW@STT b1! .A!@q#ɴArR 2 1Y[,'a 4i7MnqUѡ{lI'imBGSkSc@15;kɳ 1+I-ʣ(I:z,NcE'o%1ɭn:F[9;UVzϣMQet^T dM \gªR*ńsWv,ȕe@Ƴe6fIIf3ga9k t tPڤ:k ;8ћ, ҴN29i`AԊ@"s@O?1[ D& ɰ:dD/q kBƠVOzZvصT͎NHxD沁l}V3d:+P<)L hDqTmxZFf)%%R|!3,f۴3^dbT+mmbiaI/zt(l+|avM@< J-WsQb bd!ma5r& AGZhN!UQz.hzN*n V1_yy2*0/lvu >V S3ktEʕ8IB^(pj&W&24kK5R":[nuҝFę{JQLU.cQh/.72'@ӽhhjk8牠P… ո,|TчϹB֪\ K+ V%T˶پDȶ#ؾRA7уc)sM뤓 !FeBepRYbREErzlb(mn!Dc,KtxsBiד=Wr+w}u1gb~e  G>50*bl41y/:h%9k= d7Y,ցKDIq6> 98A#ʁ59KJ r0f׎V‚wXSN~EY>g*#c:Q\1PVwmǨ1 *K<uUUG T]*l TWb#RWDAáBǢ VB6zJG`hU!ͱBT &UW/H]n/l-F, OXw;o} ^0Zkaۯo\ƷFZwdvXkN,H4f }}e5wFlw60u?7.8z1;~ڪc3xwkZ6 }VF. ʻzx^{>vCm2e~Dl҅xQgu7=@G 7jB%%͙Ǥ`gWȵ E]jϼ + MB.?3B* +˵R|LG\}4Pkš+Ұ6E+4Sop^^;&S:Cw9*lIk_7A-WѯuE)0m c$GAn"CEGW6#i2αqf6qad6j2|9]&m`#EN e Ih85v-ͧjGlEKնe L*Xj0EY6;)r qY^p3C Alm=j[bÏݺ֢lV~X/ n9 6Dd Mb9)0c, w޺chVA+t,B$o -% r%'=Hb@(a!nxzףRvꞁ0X{~Vn^)/ɻZ^B.,ǺOZ :S ,J>8kBTTdB%" zU*zzO/fVՋ]ڇ䮋>V.Y<:XylRk$p  (F" ]Ɠ#@̜ze&śU|@IR# M 'ГfgF(9kICLX%eJȓlVAiafi]g!9AJ',i=2S`6ANY)=Y-}F CiK폹?u/u~~~kRw"9zDž@_w{qB} ol6t|RN4I<+1" ef}-z﩮T!#fSI!׎ݣN'nus:*Gi*#e`Ie"s=";eGN3cPZ;'~?_/~zW~_߽?n'ZJu$p3 ܉&z8/-DSCxӡɶn0Mz+qol>$-tf[kBCwI׷O^C$[@WQ؉ӯ$f$5UIj.SU! X/\g-cpc#-:(IeL0v!ǒF.+Yo987ηc8Lu^Uq'{>`Ghe.]J[y*DN ֣p,f4I+ YڃFvFz#M{yfCc<6j['cxba|\Tym\ӍIaj0 7a,l*iRbםia^O` \U%ZD@z%ͬBުʺ2a !tx"dk!BX?z/^qBm*!A*R, /l!գh:(xCԹ[|.bÔRDnE9YJjP 9Hq/ QJ.EK^f*ixR0kk #9Ւ>e۟Ԕ-5&Գ}tGqަ+6QXm=b1Ҽ >ZDɛ<``Z՛o`=;LMV:SR(4t!!Р]֣"x? BuR+@2I9ipØb ph)N_1`: }-ʬC7)p_w0O-+,H~j0r*[6E:2k;Γ$0[tZ+X̩ ;5=$`wv&-F{9ۑF8ǂAhz9ܧ\0ڪBdG- EdQ[7KZŒ3iEFTrHa a:*eHcIh :lb X:#gM9ۼ 7}/oJNhLV餒b4 LbJ @i *XkR̅# $"Х<ozP@ab^ں;A2$I/z)UD·_C<(JE_=:[hd &{W1lж+dz)U:d!CyXKYu).2$r2 6y`IkP `{ؿLwEP7[MHtzBSw^eϲ'd2HpT y<Aq5"E@oB*S:LrƤT}X_=L75(A nDie:%* ܸ1Zz`ner3i/ܾ'=Blq9㫅|p}Sle[űYc '2T0 [B mҎp]@Xvsodɴ%XJg+IŸNzv}캕;ve{EKɺZNڌ}Ch/ 4Rk,Q`^'sRɧhI e-Ikw^B;NjúO=ǟ]zkX}'5%i#xIB3܎{Q^Qj !%EÎrY#TcH&jI{J1&*yupIg.Y@g"]]j!ŧ޳|ϒR%ܳ˕)MbQ\9s HKɪ4*Nz?ђkLp-Yw`N^- nxIO8 C?s#̐܂CHT&'`t@ 3@ίE]r`Q>FFoŮW<(lqzӪzz4Or(1Zc 3`yn}a"Ja͸~~L_rjn6YLdtv o>:NC~m`p ?;'z˳o|v/j3xu4]U5󆓋Six.xq8}eh:[ju=NiQ3ϗޭ ω~lHJUbj&?X}x8`gSS1-P!R .,ۜҘ#_ZVMrTnqwlϿMMt} LlɎأN?Tsl{3W9zppqa ̣{Y)oY=*ynYs#n[sԽJNhiQMfȶ8 8?bL_ RDOgލ ?9i>qT[u m;{/}f|4~xQLJ:GcBG&\Y)6E?>ŋst%K0ɕZPjcHI}Y(\hFt?t; h {<*E(4ԈǨL@c4'!7PԜ|}t62>K15t|`Pd%q5ngoW`''͈9k^WCܑ=aq3a5N+ʙ.wPЬҟLÃ7uhAEI\FeL,b`-D$(mR|oXz~Eiti (.c\5a q1"E:%B0Gi̥hdIP-5DeܨBa çCpuW qQWz`'Ȭs`Kf8:,(8ebL6^]hxH ဈNrhpĔ+-<4@gM4Lጂs yKh$fDq *Õq`F;z\NG4R,hjѮ޺"vS VN'?k*qݽuβrkGJ Ja T  qF/),T.]`hEC$y.4K)c]o ƃjǹ2D&BIΐ2fW䬉p$Io\R2`Nl "I@^HD,@hv"yƨ(EH 8 bCTm2aɱZ#BiNScN# L$iE<Se fQyg~7{i, D+I: qF" 6xj91H4ZX0,|0[~O!k4Nm۬| xaPTǁQyJsP13A`zJgV%0) Mx狭l/wy[m "ǭ&ԃvr8.6S.SGjƸUzHYPG^YS<|ӲA}Cs +6Mҍx: m^| |.K!]ԫʊJc.(]ԶthONxe;v*%7B8ayNeH"rh 1G!,BJ,1D3Rg-ךROmZCgCUgLDP-fQ #%E]Xdp*T^ka`Eo$}ԝT>N,qNz҆?H9v?ZvKn]e}/%xj@wq0z7qIu륜{z7TL΁J+W3[IJ_шЛR}qM'ʦ\2PTDe]ȜcRL` pʩDA* 1q3 C>87vwo.]a9{_vo7d) u;BIhj D2W.Lbq؎C>^/ vnjoY<{޿y$h 8筑=;:Qh` u sU )p@`6bq l089F9x`zϛT:~/S *RR9D N@"h~+ˑBC%+bxXC@\DWCie"cNEo,AڠI](?ͺP1$Eb:MM.xAYH$)PA'щ,cgOgxԩ.O.fw|:>:'nu+_M3mUxғ颦vԸwJwv۸V֚&Ùjj!yn l>eo{eOd1B/ή-:d_}s}G9J7q^ *j; '@ KW*⠬.\v8mN88Re7/PKHF*! 
hŤ*HPi^e@O}ݝ%qT^D.L]MGK hυsLԢp2 6(T6(;*QQ$WycN1DJ'Sh_N)vv+jՅS^wNʹo/ǟsǹB5h裔+R֌F81@RRJ#Q!r+=0_$؜p3HS."@;*r+ >V$6vp?5ݍ +}7GK޿٣2Z8bw7nr'4dG+ !Q("45hQXc(#uBjOoB=#sC85&%QcfyjH!&Eε&5z)<\~NX6k=$A'!C<זIHF,^[3/)^D鵾֯w(mӢCg?JԃҞfpp& K%@<+p;cxa\X^ZS`RiwԄ'@(">(VUlCJSY{X道N#.[ kIjI4ݿ4lb29'sjU1 }Ww{;J r -Y<'HJrlMՐWq\,d 4ZХuZ,^MQ={a#|e;=uYIMϓp"߃ar uutt.:ͪĪk{^_EAيbZk/Ns]85AQo֫,.hN^LCuVp_퍗}?|{)gOFSxw4Y?|;ߚ=Jdvyv8w_ӽե;u54]/6.{L|w5]wk$JYZ79orx5l% |o4K57Ї΄?$%g3qFxeo0h,/K;D?h>.Go@MZpVq19W#\/&Mt# 9+wp76+ԇmv}]8vd1Zc+߯a~|]h%x^n>ŏd՟ oY?&{jH#.[爽 *:Yq^qkddKۭc0G4t/ⷅ9i>: 㨙4{7b`8:Kū?_"2&ǟ]/טxQsjhSd0?Ct(^IچJCV ¯pt1zVwfr;ZQ)E-T;ѳqhNq矡ږ͑Yh#;BpF>۵SaI0 ߵvYj, bwLP;jx"c~R̛捘E͖-# kQrfD Pp臂] zgJ[dPY.QXtx`-D$(mR"m'_j,f_4ھE3ꡝ-r7WMHa\ HAID4킹@M2: &"%Чjud$CCB搟P]xX;qr 590'|ZZǫ{˧갠hV1UA_x^,"-YȂ.dA>/UUuq9mC#Xi!7\C*,h2 iHtҲe=I&G6ANIS  yk0'6$"D,@h"yƨ(E;4Mj ]2a)Z#BiNScN#EH5*Yy :#v*iqy^ՂQorrIZzV@t$i8#<5Ĝ$-,utl}c?&pٻ6r$W|;`K0 vg0b "Yt4%$'_[$YX;xbufOOq= m(`P|pReZ2EvF'WMA8gM נ!oA|+W̼}@Cg Ғi$hB;\g+H_ՈEDp=b ː""1Df)ỷC%x TTgGTo|zVۀ޺ThY^l6d]C$vIm m`/[^»7=9m.wIׄ*ZePkNS`9rw]1{\䍣y#ֱ5?,[t0Lz{b0'㼛SҡE4O&YwSoqTWSs[wu7g^?f>I|ĉ; P\˞[?o.5nϾ9eQ@ΎYsN\v3JFmhH+H{iQw!Z(ej ge1fIZǒ /{˾d'ǎ8D6$F^kgldʁQ(@S\$d溱5TX Pe}Dk^DU %sSXYQsbKL[JqC8PRm<%Æf_|TEoZHDFFGȎ4Z.Dm,jh *S* 1,B;B"pZFlN)@B-.e@M-y&`r9+ 78 2ږ8;,W*j͌BSfdDbwq|nh`4| gobK"Ϡ7y'\)RfȥBs-Xf(WɬhI0MIɨ`T'ɷcg<#flsqbծ6;j]ez#\-A4r瘒X,ad9Hã:JA]Vcb]{He!"!/bE* =#(\ɬTls=l%!J1b͏ZD_""q+Ŗ7l%H/ cL' %d/*w*+1n{8c*gcD.v&B[.F"-WQXle5qn-,e!:͒;E%Ezŭu "Mbm}d(йMTۑAf@9 <I zȽ]܇]<{XmvPUL~T$>Qv1%uP% vWi[Z+(bӾgϗ:ֱrgk94Ox2K`ګ?OH?R ΦxqcB|'j1yULr(˓Sd:j c%ƛC[dKxGx}s˗p`4)k@|z+K'Zh0m w/^}n>0|o@Ewۧwn`=;+7^х@EV@n5.mYkkm^l{@HҪWƄ ) Rh,9hQ:"A,ET9<yDN%0Kqnj1%U|t.]6T1E#+rSsZs!X '6[&lY";`?ae2.\Mgd+[`6w )YG_;w{RuSC%GӺC7G1b:Ɣ1[Kx8O;z233Lf\hw]rYx3(T=e{1w!HeTdB%/){\0/eJBѤ#-5zKȧϠLu8ޅyF1ἬL>/|N^ߜ`M9gi%콮}? cȩ|ZQ(Ꝛ85xA2{p$OMV z$g0L8 Y:Q0G xݜ9,N g|~>e/d%jw4:.۔qdIC("{ yv3԰\2_jxT_'dqjkJgVXw q);:FJ.fxFG9#fqmgͬ}ȼ 3p q9 0kwex^ܼ]&0 `0Om<BkHD#rۢxqq(cQÑ%ٗM]u7]:EhS֊2jgoWiq6Lkwu{ỳ.8e] s<V 6lfMzpR1'.=^ɦnn'JoU.rSkY RGJǚ_&X>ʃ ~Ώ`ܰc(ҖF ͉w ACw;.ܻWݛѪ'9B I0ͽD^\D_.OwTe״е5z1k>~o֡*o?}n:%iQ6[.VmAZ~&5? gh/ץ\_V*"?Iw7?q?I3Yh.Xk"f!'_Y>TNxhF17`(I_;4yM={ACX'uaRw$zdf.зE-79ӣ3*7UnTd :&F¨D'͉<6k`DB^}\mAEola~YPuu+knY Uqf\7q\]!R*3H^.rW= $REPC"5Y:nawyN;Y )džTP*$SHT q*#.nsoXu-tH,Fn}r 9PrVruAr %ێܟѶ6j#σA!'I̓$# kpO_ \$b?g_>Ҕ4Vtap>]DDz7ntQKݨx䢢L 68ЀUv 9s6:Si M[m;މЖ=LJ"|],|[&D-?DYWbWTZұrjjeSLSZRϕj$?[ 4O^=/FI=n= =p(@#sAj'/{ 9_$oN'^ݱbblZ.}| R۪j~h /ƋiN3yaxê@sZ$usp53^k,k#OT|,K}|˭=ۅ}S˾kNnb5U02+'a/½3 6g^%xR2|%gS4aļg^X ^r2c|9R+ي,k-Jm|,SS{#$դi9m 䴁6r@NȒ崁6r@Ni9m 䴁6r@NiYr@Ni9m 䴁6 6[&%qnI[Ĺ%qnI[?Ee(C4O_DV%CUXzh! 
f y sA=wPsoSX[Ĺ%qnI[Ĺ%qnI[Ĺ%qnI[gΞR=J z0*_oae$g42HE=ZfoC1@%`zq` # -"1)23]WСa9Z-EM/ \R[ERcib8 mHti^].)CJO8+%&yX/ %HEǁ) XR!emNQpLIjZwHy>dȳSx"gc$}BF2)2(B:8iX*C#E#IGGI _P(g*tB923U RYU4c, LC(o`,|w1Tq6[C1Fd @)DRI7amQ /YQp0!d>ܔ׹m[q*!+\ s%秓6L4<0Km*CLK+gI1q&۲ en-V'rC2e=".X7XȹA AZ1jM1F: lpIVk/;^s%*@klFV7 F4 +;}Mîr}BfSysbo@cFG$R;I$m`)QA-‹<2{Cc ve0(~.HwpE(8u)͐ AA+bTQ٥BB,ZZJL)0"~ yÞ7K"|24sS-\❦b%íFla"{&CD0)1%.DpCLDl,,xGI4 vzwkl>o8J{ Lp!ՏN8U_r^\>&?hm!SJ11.hM4W+T(R֚`$]Yx[=3ms5};lzVuf|^tV<8cn/Vf_}.%ejW2CCe ͱefOuXpw@y&?`)C߃][$Rv\۪.ł$"jE HksKQ:$|hԙ|vZ+ͻ=tx֭*^"B$_SKIgXBhS /$0 erA4AG4C1Jga/!u]ϊQ a&\JAiSLYi %D0AZ+i}m5ܳ4.kOz1$>mѕwp[N"q2yj<~Z(8O٭XD˩fL+ba% kB+V@iG FO>'kyX mZͬÆs`LIB(@Ґw>kV4Oߟ^_|Cl]-)9#l0A+u6uK*|ݺz;X֬1(8`8ج(#ib͊je&cw>Ⱦkk $|'WNCOym gz ׺XY9_ZK_P$IaORlpoo}^2;K{~k=I ӋO߿Nfpr]L2LvL'Ϻo0!{)yvK~4=M2<& }*?uAtZ˛w%1:"X#'&a||0SɩS1l͆@%)t#JҘ9v9Njq!E~ `@}ei?$MϤڤ@犟C']?|޹ѝ Y'9;_㿊;7w /Y2ylfwfd ;``E?tO?v8{ ?s("@pä ax nK$nV]kBfGJ%u>H*?NϘ~PTB0E:`ϡOa:}NL(NӮ/^, !SDLҒhK/ Ec$|ONtT'5W[NE Rg'(/A}$5s)hi:ֳIrF-5'` k^71]A=: jfmfovoX 턷Y1%qk6Qjt3Cyc3.{P:n'jsw1vogFE&ȴ D+A(WF !'*$A4ʪ`% dGlJ^sx]i)EK Hj8PƠQs- RkAz`YS{j5J.'7W{K! "Br(M\Z^)&CȾ V/|DA;k+&'QICāč:#P8bLH$8T NUWTpҖ  L@':%hqѿ"/\ORLA\UWU%T^ LDMu̝_QO { HL4 [ JIj;)A溒ػJ-ߠvZ[r1>ΟkUBr" .AȘћ8K"R0HiE%#xM:F$NJ&fjb NP(B"!(OF:Fdi 8Z`-6& ]yquv6He,鴻mcKoƻ3W.ԋ:,o-3&}9<g7El/[nmD9c܆dfh20٥OD Ij\JGdU7]1ٷn7^)%kB9a4dfJQb \ :^ޙMGҒ/?痱;с{4)AiOXN>&5Fb܏n8K&澠v10jC]_Z#ESE @M#Gs0Z1m($<ƗC_z RVϥf 9E^!qDhJ&G:jk a1qƨeR/XL?EDSM" 3$ܧ*iny"ђ3aI{BB: 9ٔ)3%DiD)85Iiʱ=2θ{'W3ΥAx>ūF^ž=4}^DGC58UMCUr뛯(6\\1GNT?Tizu^kc=9rqz29- M\s7HǶoY풃Um&hh ђK}%!n|U3le3,+{! Fa^3pпuxf7YrRȕ2KjuջI[g{ܰDp/Lj_ʞ#zHa뜿cP5Uh+߻?)?髟ޟR7p$zWأEQ,)e(47k [O;|veW^4Y=?[y )gkͪG^lY̚h2p=U݂b0?o˨ƕ>72Z-Ɵ? r@wlcIl $$5 ֊ h"9h m .8iIVY:;[;fW7f42y:cKBCz qa5֐ՔC0hSd]gJӉ}KEmWw⋗α x>/rbSu8u"gϝ<"h^ڃCmH!z"\C :E> FdT&8.1qE)Y?]<8&x] bC@ 7'U6| V0,]njw\[\o%skЪw{'fWA2G?*=osZ"kw[Ahz1-&řV $M|%B3j!=R*R$PڪLzy`{G>XE.i@R 3"ʔE.rJǜ}(RRƹI9/ic*" #:j9V {䊉sGC!oӭ)n0=z ʲZu4ob>8lnGW-s>)3 &&DY[n%/gg=A`Đ^&i&8l*k4LJ#*#t"IS@d rJ\ F-Duhk\V<eUF2Y,Js?Ϝw.籿>.ύe}BX7jkq۷:~, ̂HLW (]T͚}d]E _ 5oN["x+ׇR3둳 ^vpg/Ors<(%8xI)mA.R\fE if̪}̻&謁هxr4{ T{*.NU^ڛi+jx9s/!9#>QP~pyP/L< Eb g㏲FIoU EmEy=#̞,{WQrKlM"XXMI`xcUVc!f9 zFm6Kˆ̟@S/ :Zv(Zypcg6r~uٻ6dyH}UasE&X;xClƔGRvFuԔ(i"gzzjf~]USEDiD 95ܣwbgʍ*EcT&ey, I(bHЀds9UAB\B|uk-̵g E:tJRWҠ ov4q'-/:(@6K`n\mjs'j#dOF#=WQK辻ڈJǠs@W2 *{u͓{#uؒv*>\)WYNj]OFYHCBiO\;h$8݃ϵ{ᦟyV ԴwB չUoɚMEVF(^d`ZsD8Wdc0Mn0R!ТTpj#.>.l0P.' ~R<)uo?ѷk_]vkUkŠƔ|7ѷ>.aδr-3/\xo0W{q_`=+UY;t.4惡F6J1h|x/e!EϾOh;9].H>@lO}Nr5Ɗ3lrq&4qF5~@::i*f: j(I. MI4J8 R àSG+5Οnm m,淹)ˮ7U[v*UMJmRqPRLPo|}U*e2mP6^ l4gЧ vToz\S~3`1Ig %*!B$/ILGth⋪&Ϥ|]؟śsp>Zt1D?p DeRLf:gN:ȉq $ybJ˲rgUX okoG<"pyh03^tno;f}vg(%rĖ_JrHΞJI>L!iI73%3f4<,S,gPc;AW+suCyZe-K#ȜAͤ(9d 8E=*:GZGکCY+~&oՒB<[4b4s ho3?S~`N U\g)NOH,S@XK$gBJ܃%kĮ:3OzVBO< yܹJ@Əs)8`rZ)kc.'fM)!ZHXWXbabAwW;O^%qFbrVR!;sDHn"9餾/+wB^V-VG š(CRT۲roAd%}>h&#}:t0~{_ rM 779bbi'hqn! M~{;i(=<+t.mxx6Zռ(O߶[Eios":Zzc`(1wk~?b/t]tAGͮE~i\lJż79tzA~y4-ݻ˿)Lg@d4ӷ?JKyQpslJJDf1M: J<Kիœ{Z8l0 jʦ/Wn3l..)9;v #a9۾iɗi2bҝ[l"hإāɶz'$bRDI+~~͸ޥ9hG? 
1< iLj9'G_1kn_91MtG+ u]uIJEWL^[+lƍkw z9jwtĶi$OֹtŅIfѣ4Й_o0ZhB!8`vݴct9k mEz ϳ1/ofQ1&1s4NG}*zw|v֏o}I6eSÔr7blȠ^)#?8}6ny3}7<)!y+ i6HyfŪ͡5yyW *Ja=rԐ2y-]g—8lLsFK-᮸1R:!0a\.~vv:01s41I6?^s6y јg,1I;E1y͉ )n;u>KnBD ?KG`P2@IR+.'R-&ٽR!d\r鸁8oJ̆Tŭ?ٳ%*īg=W;w(˄|i0#NtE3fV[U!k&iں~+M:ޫ%;~9Zɨ-g~ ½Ǫ=).odV۾D\:\5Ѹo&7*óBFELQ6dE(_Sjy;ϙ˽s{y'ZV'?3[ ʅj(T)MH*Π}\iT,n40j4N!rcJ vzfIUjlVUFA?|<5ʻJ4C\_^כ7,+/ΧƗťQ%i=$)>p\GnԸJ9=7kE`/!+L d8ą`d*zjJzhz)!8C!\*8UeYFΖIU}%EB #U] 2q!2+3Р9pT )lɗ867@].<k>p1pnNY[jtbVC_TYZhJ'n5'QV&-EdYKw5)-":84ǐ0yntҶp3s>;P]%ߎ\]򭨵%ߎ%݋vU.ΏT֥~޵^ hOc o' 9kAeOBWg{rϗVKV "mqIoRpYHU%.O2)+deݗ&%LICYOtJ+.} L2G٣kㅮFΖF EWswm9ָ}ޜ1 ,ڊ@KpC<,Ǡ hss2a*Ѵsgp-ݘpȵ2 uݶ9ҳd"/bO)Yd*lt {v膉#}zՖz_5nu0ඉ:c1>'g8GY\:W#wlc8hbҌ!35*(LT# LF]ȤRc߉&5p}0"GTJ{p\t"КrƲh[ dm=-?r2rނ2@tVE# DbN!rI8%u}kI1JriYv!A>/bBxHʓA(bC0* j5r6 jzW^q6U㓧VmXmv{jR(UY>[hpsMt褌1I P59=ؘ%=)v&fEm MkL UEE5]V㌻b:Nϊf"zsK$L76`iПL9bK|J<#X)N9z欰NE \jT>r.`%*bK+P=I6%MZy.L. 9̂9O㒉/];ڶ2j; v}wRC*>qkPtbdA"C'K{mVDŽxHe0x \L!/ʡ?" G(@d Vjl֨_o*/X?2";D\fu6eo1d9chxĕYΘ6HI$wY.|"N%5$Ȓh6\CTjlFĶ9z"ri|ո䎸D]\Tvq{W&2դ̜1\$Gadapq/xX;2ʻ bxx uяn7u'H)FlN43FъA)Vo睊 8z $a02FrQiHVj2 yRjʑzX1kĝiWJgV2hQ[ouMfZiTT,ph!X%-*֡nm| 2):2 c<3JK&6s2Fm6Z Ix+ H5rn+ ZMpa)em4^mZ 5 t>y,$bIb\u² abm+hQ2i2ȳE8SkwGyhI@5DkT2AHԿ*_ڐ ޴ɣx/.j rG(~^T:6'k3cssnq?LFzC3m;7F'j/3kQd4cnJ?,̱T7]j ǭWޕ6r$Be))vx;{<%nS`FQ$UI%ز*VFfFD~G] >ݹ 6:)doe>ȫ箊wUjnQ5n${$Y<0# xy]LGL7 dlDԞ[B5΃߉v!]@nCkO8;Z4M_x0QNj7\6-r黗][؛OQh-z'o `[Yh4mȭk-mrXka#gBZƑV*R#Zh6񺳨W2Bt HE&eP, Rj/} ZTEڋa,p`Ƙ Q<`űˢ5$YL'+rbwZs!v4,kmYqU64`{@ {K k,ɍs4tuAC2PG]SpVzO7ߨEg >bҼ >zLjbh8O/fpddcʄ3_)]9PQ M:W3U3[3eT!I ˬb e'q5 9OHK@e3\jf'0r(ox\Bdj}TN3K\J4ȹg. @ۘHR~+6o>AEsZW ǫτWv :^+TWj^ؾ+l -0LO+[@1PRܰ l̕/K'';*{\Zƫw"/݂kXۂkAv1!+^0l&8"x/qCKL3 /tLI}Z`@}|]t[Aiu<Pm܄|oܧ?kIXfU#7NUITt hv0j駢{ 5RU.sR`2Hzq-oHf[א [Ԑ [0"KRd2g9BqPOt] 9lڣLNfɳu~i(Ie|. ~֬r;d }"{d:Ah.Or(cX֥{х3Veiʦ"%P@N%Ah'HFLk~Ddlzv>d8frZ靣 vIu\Q3QgşV2R>LCyٌWO>=FU框xa O{/oy 8|oN? 
S(uv ăIǃ`pexVitbh-ro3nr;ƽ">Ʋqkvgk-@R~v8<-jί4o=˖Ss-k7 Mb~HIa^ 9pk!vE+oRhZMZ"$m2r;I3,d5BD!'[Yg1X2ekcIpRyU-=jӡ-!fD=Ye,mcT2,O({F5] ≶uG}~.<[`U<;3el2Jڙh핢^cu?bztN U*W*0XR@5(m>EuXo6̔QV#$H!:M90C0>Iz*&霹VYFPȸi}9W9؝ZLzԒXJ\2X.fN-N=SZ?BjSVp۬r#Zj4Mj[T@S Y)|ۛ=יzXX}zPg|l'eiV* )U u\,hDr@di9f{P3l甮x5#<ՌsɶM7Xá`} 2CpL2]|Y^_sUB&WPU 9*X"uߕ VJ4VVsJlmIeIVu89ĤܗVyUU9,:zv@m+td;/UGh:z<~*IfA)Μ4:Ą!c 8&9zbΖYO7Ԩ:$ : I\3d`rkHGBs Kpc,[q d2ϻB:+_}Gtn)j,ɲ4`P"L\JBmoRo 5˜bCU@ [)cwҲy_i+}om9_\U&jgch+"j!$k"kP 2d:Cٰ5F-}Iİk.%9eg!lO2ĵILpS!zȊb M[99Y9rR/MKE} =<(|0R"ai t")HNqfΑd =j jMo{;y[k#R41s#qQ0g(2$ٸQș sDG#URa7D:1Z M 3}8"KdYs۬Zj0A 2$K[sT){hgmX§U[:sdepZ(# 5qI`^<5{[1tüF'٬F&-U}K%D [$q'-Wv(6Bv&6lWb 8lHq&6NYe$vH];\XWU۶B^]Bum1@; ;ꪐ < jdmWWJիW@ [anHm>.+.YZ80~kwaaZ097F?`:/ndm$ õ=o8=m=pU`OiXbd ٜh2NW WUvWaiAh]l̀>,,Ki*uAW Z惬jX lP·_IĤB\vQpn QBꌙS5 Jz qPuH]\v)n@򖫫BiP WWP]S]BW`uUȅΨ+(xU^]Bu4;BΨ+"`gDPzc+}˭{gWWw#?~xv!8lݎnڌ7-wͤ't_@;^%rkUZ .#_iEe~R@d7sd&W;s"YceeK+3;Y{vYZ%u]aKU/yǶ<}??V/}NnU)1oIې4pd:y-Ah{Ȃ?[zo3j|}>Iße;++.ft V|bp@XnJۜ1ݶȟ93ؐhg%^{2x|I02 (0 qԻ5axjro]g g o7I8[rMN8޼tp_+:7§ޜ9(RS[Pp_WZl&,;o~ˇz?8({^2$o"voV/oklkz\^%|s3nަdTT[*Ñݻ[]pS=1曛>5fl.fH~w2ǻ83^`i o؛*rae wQy7R,qw)Mmu_kwٛ4)9''-sSӻ .;LZW)nqȽ&I9!n1рnf~p~GDb]ᐧw]̠[Y[ ^/Eys͒ԇK 3ݮ^ <_i*6~79t~| ȝF{RLx:Pv"0Vx%d4ݖ ʛ=('[eN3Ij5)Hmw-&j^ %ζ띕b_oSTİCc17En4?{xTmohr9rF/DCtx&gob.:E\IvS{8-g.hG7~ӮSZ{ y8 7Y:lߎ{am.۵bqsov~{ƻvϐ>j)m?D*h^ZYA0}ZA!^GR~Z+!tE &UMr_ugjc5VhN0V3ڽF6 ؽ_U\Z;ԒUW+}5Yg$jYstHI; lߕŸkuf$nwr:^N2h_tWk_VnkFjG > j'Z34UZ '71ƚp^3*AU3L +htp P 2+N+Df+Vٛtp0W,֊U%vpEMOgj9Q:Qr_7ށqjx`++Zpu%!dE`}5\䅖fJgÂĕ!pjԒsTq+s\\ig*] X}2*Zpu«z;le5b޹`Z}tWW'+K<3"\Ʈ Wj+Vf]JDW'+Oj®;_%΄K )ļWmj9sʥ[Vr(JUs(oqzYedg0<1q c* XzY&c4*]2䈼 "\AB`$j rJ'9+/~Ѥ`'gކiZ;U:qdS=Xn=SҡVZ7w\JZvpޟ I;qő'MS{өƩ3[G/:2H9 v\c>NNVR qT+M5b\c%T:q9*oqrMW>J/䂫ĕB;] 5v W,Q-b:xʰ$qr3h1gBG4Ë{Ylggqw% 2Y8v1dPW^u5_yZ}*2|a Ҫ&\A5\ kԾ4a KY qrM5bW2W'ƯkȂ+J!WI[ XGVIoWaÙ%8t䱫Qj8҄q*̶#p\jzv5 ѧke-Z7?WRW'+r"\A6Z_ X7sTM NWZQVzABWrn .z\z+ȵԂ+Vf?v*ZpuچPؕNX] XWSqR,IhV(JV#ϔtd@{ڷdx@k9d\zDŽ13/kVk[rqy%6sQ.w~w s_p?ܠhoOUYݯφ{@WݗW. ݽw_BBrۻ8Br:8g,l~m{V6q=+^tq랧OZmw?;=xah[j}ټ}m/;-WҬpKuߴW~OrFue ʯWղqü$o+.>A0pxƽb%tJCos4wDʈU$ܢαdDP&R(abUJ⾀b4nAY ] * FeF]BP4pJ#Ôb+{ 4٣/BFOA:#k=r,:L#zڈ^ ! J٠oF y/? "c)Hz RF 3Q!(3EۆZ:clR"U ֦Q|>0Bнbf7iC5 1 $S C&@tf7QDQkq+rEϤC+‰'&&4X!Q.)+Y \̤5k^):?@OM]vtDE^rOu!(I"/K.~mL57ASIkiA!哭4i5؈V.pNݰ ".!kE~,|K!\ޑ OANݽm-6KEtN1gJ&C;4') /~MU3Lض2:]~+7k\ :_rmTjZ=Z>Yݦ. ZMKD0^P(\ADṿ`BH˭@Kʀ00sxΡOb-!2E /3HJn"BiDdO=JheHH(S {_!hZxOA#HP$T_"BV/ѭN\xB=Cl ^Xdu4i$p GDA~*vgE#o  ,1&ɗA:&$X;S|EIhۼqXb[[]K d9%$#a KVxOz}6z_gO‚.&ڐ"z!o3&K `>ҹd]]@`~ y:t,< fv>NʖsML'd C-E[-d0l&&SfsTKQbj]Э!58E`s@n`#@z~4} gr7LJ$/7-Bs1Û#J Z5h+:)R(]&x*D=`TaH>Kkp'" `wj2+ ' +Np +E +fo"0?" V1 0raBO(iQBRXFUTk &!T=&X:khâHYa=+NF9E 6H0sc}~]Q_8mD$>3)%LTH#L5%i\4Z`5w ʵ΋|7AK9pG^E9Z#e(?˙H-tX m@ >b1%0Y2 &3ROQXm(1tqH*W/ah Z6GِCL\[\.uB-cd4+Ms !#SR,yl)`4&,?tOW$,X0B a[F藓vz7NV^:+E5L(rF5rvݯ|66&H(0W6Y'S.'؇^|T1x9Y viJ3 zv qgoӷU6K3e-϶cll^M fϹg #h>un+^.l^kZ]٨U5ݏ[7Z| m[5bp#qbxТVZ~v(PߞF^)3ph(=$@,NJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%зppOJ v?J mݽQWi tJ +3H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@'V^)0 zq%P͛m@ %)*J Q N@7"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R~a_'uimr{}\P;}4,/Aʼp FT:"p$\: 'Cr# ].bUE+]骢Ԇꛡ+gL,8]=wtu\Ο9: 2CW; @WCϫ\Hip ]tUQJItut%hpMo֋PZfNѕJjoBW@k:]W]"]*sU= *\BWm+U;J+.]U*\כF:]ԭDWHW9uvs-Hf }pn\rSբmq-qmojP!dg/?|7M9|7꡴gR(͹nØ7 b4^6-}0sbZ(N0$@V{?? 
&(~ff c?&bқ6tEzpWj x>MC07NGڄ,#5䶩/, mNB);.idAjblZQD$ˍZ59t}Od 1d;ʺ-)\J7QGA%- Ngz֛Vv~ᩢԚ2UzCWϊ]"]9= gK::BWmWv!"]y'=+6?*\/BW@ky骢ꛡ+g._w`ܯS8`ta(wt>+q] =w^p#`Lo誾g\PJEtut%LUkD_誢u+QO^":EhzDWcUm_語`TJz˺vEtUJ3_xTUµ*GWDWHWIn8 2uƽ'kWAvhh3]ab6J&P]O[PϹmO\Tjr)6% O0H!;0Nރ*7TVt=n(˔Gtef?+\-BW] +Gt*ܯH$hka(vW+ϙ}J+`ߟdp5͎VɮUEizyƸ7@+lѹkqDW4]{ɥ<$csˌ]0G]yʮBtU語fL W*ZNW@)!:AB1ۧdн WUEkL骢tDWHWJtU+l_誢ծtUQvm#W+-?z|V*\BW@˹:]UOLtU^A&cH<݆qc}@A}ih9a<- 3HԛZY^r %^rɝW,ݨYqZ:ˋ\US'i\qy_k{h5]Ez~A"P>\~r9_{2ъ;9k@%jY >Zw曚!O9gm<*?R54/MuX|uϿr x%+pś;Y ~I>/[|kjzj6f'wKg;|Dǿ?e^;O==?'یqkh+?/*mMi6jsg$ dZ+ K PD%[!E wpbEq_.N[wm{kv?3ma^\Χ颹Rg:-[J(V8tT\ƜB+-ۖ6e:NkN:. 'a"ѿOƟhGWG._go]Ms{(|\nh^ ˅TBJ;QR8]tW2܋3nVvx xN7^:KÇ/ˉ?0[3_fnnr0*L{VpR%N8!JTl{/U %l^Qћb {UL/s6-ڮl^.5ݏR>)bp-\t,)n{.SZjAF_ >T'Rݧ-lSVu2~=(=lhfw6-|ĐWxv?GXaԩeVi֨Fd`jd /¨¨dV 㣰 1FscA)7V׷jE歲nf%]HKZa< £zSݫURGN1ftv&\"  <8Pk![qƯ̆ f@Sѭox**bXqNqu2?L:}cW{8x#ފ,'f9AM!E3(DU|h4276൲71Ԟ]ް(d8/5J3 S959Vez䤱Gl&U'u>u6ӒQm_m/dR%a@Qh[H&(B"ZDB ~)fڱ?ԍ> kxUlۊb*v.^!lU؛|U7;v[V>AkaRTGA G*d4a{S=?~|OXr_we{jwwө-;A -)(y-.qQ0s*68N %8 Bf].em⛗@$3:M!im!Ռ]Be(5n&=/&S$qp_lپ]_ߩCrB^dJp!G@p*koЈ,%@9(PM"Dmj 6 s<$%AIXQN\ Er2R;coci&C ]/cq߹P"OJ7NYtV$8φ*`,Oa,]X9]N&hh Y }q7FY  9vM9]Opz2[h3q T6.Ů_u9C]myfX{ VUp:?_H go 1i:c";(+ gޝA{tYd'>+U0 )nDY,* 2-V*HuBQM {%L2NwJ;/tQ[s7c]o3Qow 乷lgJud1@mA+>Kig -#qR RgbLP“s)4{dFVPb1tFQJ FZC4Pc(C= !I ͌nEWd!&4`,,.Z%MjOno].>ߕ^x+XIgKmu=7T %j] ^҈$ml/gyq@ .;#ҥe%[nU%xq<:ae?g)6tv;t_Cy Ϸʨ9C6[ -CBGmC\&P8EĭP%.S^n?f_^;seՕ.mRu'P'mc!%mT"V FUR6K%:ZH^ %Z3zٓrus z¼P VO>Ot9{KXnvRW=_A͏mt|FdffHj&1qy=❏{Q\1/*%rW22$>+ !+l[d "-y1{!ëYj6%YM\n3b"htDo‘>(+5.PΉb!'H2b-Mюx*%r6MPQ2|?lHZ9 m$I5YzFLuM敕~ހ1W븂)xk~Ҷq[_~׻l+5S,!+orח]uudQcױJݾuR8u:i?  .hvxf|FN[CH=wf\RrW>|FH6Ъ6Z&Yhk"8BbH^J[crh [3Mܚ^ONkgtR"X8b#E)!>*B:C"D;lC1~Eaobr2,Vr*c`W" q,p/{=.ftDSZfGCDĘ*ՕHT9MN`5gl=H"CQOE!`F^" őV]*: 2E֐v b7g%29*!l)D2e .m*Z)9dUswBt~9JCE=Gx%lZJ-F' {ѿRuTa~ ㇺ0s߾]jX_x xZ0T$)+uz^ṕ 5dZSqpRS|=/ ~:IV Nf,% r06+~3gzjMU|) FS:?)d5K<\G1X!߿т.Gg+kهK(kGמiuhy{ e-_lqUp0x/_2/V˹@ª6ly;NhG#摬7G2vH cu:Ue iz(:5L'_=siᬱ[GUQmmbyݑԁ-ϟOf~ٜWՍCy G'-o)-_X'tyyǧǬ~ݟ~~O~*|x>u3́`J=Z=J+Jpri54c=S&>ږeoyq9nЎEl H_ξ8?K&5u`I[/(][׿bZ磼^?ߔ*zwi.$0 l;HSݷyWs[shAyzIV(&_KГNhJ΅1a=ʠ/;ۯ !Ozz>.:;Q1іC)Ƥq5Vr~Ƥ x!! , ԁr%E%uiqv7ĭtb߸w8n`A|Z}V /Si;뗂c8%^ѢQ֩3]KWkС006T*5~$vt"hYڃz \$A,LCY[&FeD`M.QM3P҃o20~ʸr;e]h>9`&'5W9GF'7W|7~O *BpYFt>A}2iLw . X[-jCHm !(OK)EհyFf2-JX4C,IueL c Uu2У dSrPJ*7Ai\ѵ<iTE (Ӓ Awok^howH>x7g|^Z1e|ԕ+M(Z*=CSbCSg"t{AĔ0I{dsFV@ؐf.ZBr PF'HBX"7N[t6Wiflq^t0O&6=?> 69K I Bt6#Cᘳ1Yx߈Ԃ3GB(Jb GRK*p)Wfd-8w[Fxvr(*.ZF9[!|R[ڷn5c# G ]s(K :@{Q Q,#Cgh3XF$o#zpkm&zZbA!Q TOC\-gBV #)Ҹ\ ) )  c6I5Dl&}M&p|cʡGGx5%ԇ-<5(>D bLsҺ8،2.iP&'8<&CcgHY6rYnw͛]))##0zPor(UFU[Jc IN$s0my@FJ+dE&O,PŨyE% S#B D>81[@ԍ8;twErL P|JFb}PUEa$7RF &>_9{(&eu4l Z +b* KHc?jbO>k0+e-.x%Ht)\AP9kdI}q.qϠ&IOc_H-X7wⱭLwdADž[H["4'6]2PˢR㌂&Z+NW<4+0dcݪak4D8FWiRHr\0h*եI['U*9A6lќ ?eRJ=D*O]p#՛1:ec.,RFRF0B$jxvpԚ>%0Ց+GH ()M rRmn8O&z<_V(zxT{ZV8'B;ׯO]1d&[=]EB"'eLTI(F\JˊrgUX/ޭ,ޅLji-FlDͯrW'zId|A76ۯެ;,韱liJl(Ly)d=HXzD;cNQM[KE娡c.C[q6p-P%,8+*iжE5cTm_J]RT5y'q70Ll`o__1kB-ȨDixvE2=#< #,wQO C 4)j4g; ` c̕<3,Jc)y<8"?X7ĭS q|c~3s2e%UND,8KGJQ\F%I-~om_8Ef^V-֬ٻPH FJѶ-p;@< /mi/r$˧Ϝ)WiQ@Ԟے66$r-)hm%0&|LFU{ߜSkkt9%̐_O?_NW-7|cMΨdɥm?MW61"fx߷,RA8;F(Y,^^w^Ĵfx>|ō~3~8_O&8“/<f~|tW&'G߾;/3'xOeO/ptN4OO?Tf1~x_NQzw<}Ͽ^:(jyJJTZѧkϔ$l4 4_6Ÿnve&'C:r_G_#|N{VG<&\%s80&'a-ǥ=BqH&x3m5+1ӂt{M#?l9/8͆>1ۯJp!k;e*U} ח3VW0* \ox|8C=<vB!AI j0Gov:'=Pۋ̯Kj;ʧO7E a9ΣskwK@ ֒ ={KҠ8;O#z.3dD!\6Al/18<;;N|_m`s\ >5hЅ !6>= ?;5{Hw/W͊/7C7UU^}%5R;Q2 v:6"zͯNڈ?{d2RJ5h,}T6z}B~y??9W1ox03T:>2!0|arWƳ[yRnlN>2$$X-]r1<&m.H1Y{ 2G 7Z:7l/lv ~Vx;H?RXURhiBɨZ8wJ0\rQ%3`\0K0֟u,~<-}B O!Qer!'ƴ n/N"v%BA.I(8[2ՓVH$0Sm&=!! 
[ ;Un8q>kJ}ݟm(o[Jn8]w*xh&Ë)B$"d0P1WZu1ϙ9-զI9RUM@#\y @%£>')=nȊKȌ4"|8i\c⬤-A< ̢"%UHoYUUf:O݌w/%&&pdSč| ³ :$ aQ*>g.CSଁ EzTY,3"d8Xm7;&^WAR`W0E'W\ƃThGbbVEU) dT6+%PQK阙WƆms;jt0ѭxyNsS.nj}ȝmԝ5[EQ@Zٲb--`Se|)3iTZDrЯ1k~Xk1n- ݁;r0VUԓ[%ғ>v߇%F=i'ju8쿺Q6@4Fmt(ƫ PmjB X/C۷m[ #N,1*YY!yAzT3f3gB.:1q؎ܡBz&%/SƦ:k ԳtmeaB :$}|?jk_1Mc!Ar;76fp֘fƘ1sszs"JVCWWZu"ttJdף]\!j+B +B=+Ѧ""TCW2] ]ZnNWR@GU"NTCW1S ]Z!NWR[1sծu"л\UVV>gOI .^r>\~x)J7?^fp2W $|#9Afs8 90 Q a#A Q͍Pv7+}s!vBZкttt%֢& ;ǫ+ >Z JNWjJ fTDWF"ZuJ@s"B|_7kE-thvE(Eo #])aPvBC-t]+BEOWHWNI&;A *{@k3h>n͸^ҙ-4C;ݠe]@eXd= .ZBktBPo0Ȋ kVCpE5NdB+;!}++V5E!`#d5tEpw`3t(Wzr9u6fh:]!J{ЕԛXw`w]mW8d35J۱~Rf2=]w9μ5v"BW,UsPvpOWOBWBq"Bz_7+Bk;OW^GJ3S1yEBWvW6CiuOW{HWc""JUCWטZ 4ctE(jIJii5" :p]+BҤ=+TUDgp;bِQiε\M/Ɋ Z6_iq VJtcjP;02>56:Me}*lQ֬F/7N~!\N֋yk9 D-Ihc(i#RЊs LHK"e2,_}=sEØB<=Fǧo/Oϓ{Rc3)5ZTw/>f2:韱Ë~Řx @̍ dH/|{x[f0k+T=tEpw]l< }HWHnXEt7dC]U =]!]9?{WW\b( H`yH/H8jɲvWV=ֽ#€u,:ŻRn3ۋoJQCWȡzvz3x`?qiah$PA9d=k+JW$=ex,RnIWϐ ѕ}JQ:7'cWpA٦Еq+tӕti3+8-'?40O}hahyfPQz3ҕO^Ye~)^CD/vexv@~C4=on̚,oiEis}]ߣE%=?˅($ҹ-`OYDV?mX\ `3~) ѕNYHyt(-gIW$#!`q3tpC ])ڴzJk]G}z ev`ahja(eet&]}Г%ۡ+;C[?)Ji¤gHWVA܆ ct+uhj0Oztɦ̠ ]n2Rί IWχhdCtn7?yxthNWrm'9M.tܹ!`RO}4!hNWRxsz dwS+޼M,?"<vtkXU?(=r1p:d-<堑{n솞t@a/+8 x78oڝ\[trX.4e.E˫4(Je+vZ'Wn3m\.vBxt\ 6DWlvx*\ ])Z=rЕ5zk^'g8'>@i׮DDtu ʰ+kOW7N'OJK ].ROt(Y&]=C9+nz k+Ef3+[2쓱a3td7mXT+; t+][ [2̺!R.m (!]%B?RvR+{DW)Rr-CSȞϓ$Pw@hI{@C-ZpYu!3 8ftيn/"+J?y=GCI6DW,a;lfUhŵӕ%]I{)8])\[+EWKQ7UhK 8lgY(Ri>S']=Rdb~~\|;T9懓kD7#iwիWAߝ]i/_Q὏__ut %/?|~~Qu.S#w/ cwhe>]]_\"X.SoM3 gw]XtOTr` _M黷d_Oǽv]|'1S5'GǑڿf6߽0>U^q/^=qa>5 'wg薅y!|]_7'g`~ q ϑon}7c?Ѻ|w^`/u>Գu2ffet -{jP(6S@5$SO"Q?p} o6~^k~ܝmp]vd$%e.gۢpp%O אO8j>p}K+1ْ#lZ*Ѕ+[Ki6qa#7i!ׯSRgFz2 np؇i06Qj1"XtmP,]ombѺ$dpL;"Ԋ"zo%.b09ɢ-A<Qr| ذ]+C^y-$R,}mr"]w@&Jr7f9b{n%-xLf 6sKcEUWM4jلb}/D4!]r^zBE4JQDhÏQOu5,#lCt-20T`&B "v 4fƐ8j1[#}hxS8RLzDz݀F|2}%iɗsd#Qr%[= T,JAJqłOb.Bz@YԡC[b Z T98 j@ˊC>wQ@D]v7X] Tpl5 PXLĽd=*B:̆Scݢ?K+U\]*DF@%/5>")7fak'8c5h[Uw˽AA{C8#Z}AI1x XWH%ݓ3I􁰦 Б" Q:&XH\izN7ipҴ`qgd,@@ WsO J+'P9fH57.4@6-[/@C:&XzN {JLh_"4 Ga՚Jy^[[K s<:M;FR 0ukMPgމk!fD q3@f4(@@נf7TTtg@Q"Ł.0͑ ;j:A Q?`Hk8u4Z{w RqF3#.tk^[Z4 2c.%hDrLH T%GB=& Y,6$@H=aCk?62(!M^2O_z{thz .M3IW̆KHN ͧ1K=ӉF0`߇y;vיŮ;]wgZ?z;U08Y=]`m$f3x:p2|J?n:@G6]ɕIWshC2jSW ư#. 'e>|.Ao%CJ$JDNkV2؇Nu#`4ӥeZCvd"LPbb3$Pf[Hw 3hţ*YȩՏoM?b{Bbyӝml:^ v"}M__nˋs|[5y'ȓ죔J2r Vl%V !i,#z,%\_nq8ρ 7D2&=^SpaG| $$6A>a-5Kq[SAhՎ a< A WuT_c+PK`6Ig$+AKq=21p ,.@GF9.ELˎL5PZBM!jPk bw pD.xzX8+Ga" 83 RNUh B`IWHڳή&άf4< ԵyLy,LvC6É$;}AOca,1[o^S@2؈q|۽zu?6~ǹAI`0uo914z6\{T `-M-iVn,<R 2#@9= MGidaRC^"$SiC[H )+aX 0\j*"8L]X|준k5c4&Vygnpzw!wI`c cV/^rqq}f/]d=FyAv@V|r͟|Ve2΁vntM$Э_^[퍤`wrQKhӓM5'7Mzlaw/oi8=kwW/_/ ih~O?w>v?˳&'?yNjusf ^ݞ_}[^0~n &%FvOt?#*8op6S H-v@@yaUy\8+f%Ь@hVJY 4+f%Ь@hVJY 4+f%Ь@hVJY 4+f%Ь@hVJY 4+f%Ь@hVJY 4+f%Ь@hV=J HxK@YTw}UY E(iVJY 4+f%Ь@hVJY 4+f%Ь@޵#ۿ0{6d2`;|˒d[l~Ȗv,`츛.6dbfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pfe&Pf=]&P ;L a<T<3"Hjp2(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(3.k% P 27L yK-0@J2)2t3(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(32(3zh=̿ SqBMS\_v!jw_?ނ9RhK0lK .{C\ifq 4~^! 
qpu/ĥ WxR݂+|6zpzo*+ԾHuJRb E}U{pdo*+UV]$ \1@`ǻJ񮒴t*Ix'WINi* +JҒ &)i$\%v_OB]ߨr mi;maj6fIIXx!ųb4Oz=^bxFw0 }(%JlTLOonɸ۫ju/*v𥶽 ILw_Š E,fkNju;  N#~񚸃mfPj @.S/EǸ.O8G^]o\4 $:Nl S몯}<+p53i3A`K%L|A ϕ:RjKf(LgL0&Y ~G6zv2%jw0%sH[]F2- =:V]](y"U&z-+ǣa/n㬽KmK"_jY"rm8~h&ݟu$>KR.e(332z@\ؾU<\%)I^'ppJGp|o:\%)UWk%($=]M$-($RL}+fדCD.xO`)'zd^r'%1HbEn'`v4bhFURKY |;}~ptaFGph8RIzxTw籗 +I2%p]:f9(mI!|M4^LkNy[`4ۯ O޼孟4H{~ =R#`)gTR6V:`lɤF/d_xO@6Ăok߽ 0ŝfOڶQ1:áoԳSx1b/4cvA|pRK͙XПr 9S@49t'Xt06PYdt<#L8%3ؗV9V"kD)ѪNjew ^v^u4Շdb_s6ֆ|r^f|QD_Ew=TnS =^i]:4}]ѥϖ0hg-#Fz/ƽtV's})3Hc` ̹b#vS?^0K}dqR~iN,WJ3|BT:cPR5EH;h5ɹ\Z~tg<-=jTWx1Moju./o^UƗnɫy.5: u%)OSGx6Hq+Ov*n( >5UfYJ@X&,50Ul 'Qg- NeByçǪdmrKj`PxHhnSQW2Z:$IH$ɨ43NiN1ҪqCv֚8kYmWf6[EQuL D#UQ0K}7)5IGFpᭊ DHk8ޘtR5(dh.~oz ~d1̬j@)H;L!A!5<@L:"oSƀD՚ƽ]RIMW?Ny!qioyfv|X )t'7`S0dQ(L(QQn UwFWfͱ7khB7iB/(:/T|OIvuTOGY[6@ۤ&;^] ZGy:y*y'3cĜEDPѯ/WyzK&gh=t_?S7|6iOCC4~1O:,OG:i|+$7+5gw^vgVY/ƬFiٱZ=&]z&5mwՇ!^ߗo/U:S]_?S 1D{spKTUVxoWFڑ[Ϯ#zh`C` *A[R.k%OF]kܽtwvh'nԵI6@3ݑzsuNY>}`(9k5Fa\]N['u87t33P r5Noh6̓`v}H-Zw}΍;Qr@ЕބA̒=Jk&={ Hzz$9=LLd a ́?kRJ!dTZ2CC .,drl$Ey끖X^\{G' z0zЯiA|Hhv\k}Pmg (VT)=L4i2*'2C,Ճ.fڛYgzɑSK#~=Xt_vN㲡M 8"cѦC_!.$oQibA!et!u`QfR$01H0#v*q^9gbNL^B1m ìU`E*,!)ZKIDpC,  QAڎkMop33+|ݴLݿwkd V'W}Dpl^pK+ӿhGh2S0"80+>~|r^`a/>Ӝw{_}Ӥ!TgҤK߮:Uy,!Vjq1$O;cv 3:$_~OK_nv L! /ǁ!rH)ӴL 3T|~M!MO]3٘dP;M|؇4~t ?L߅qWoi4r\Oi\7UO,[04nxuTS ;` ?EBw:ɩAm~%[GQוL3h7c7 4.t7nmw^Q`vTΧ?oTx<~&i7?>Lk# D(ߡ0 ?>_('/?/ CSDLҒhK/aBg-Q46 /'Z7n~Ӌy [qwUϫb~rIxͼyiV7bn^%Ѣ.):GZM?3ۣ(uNDG*RYQH$$ʢ#?U1dI\6J19Fy\[_izƺ[Z/|aZjeu8k9k+(vxۺ8(ew1-f_$^ӇnX3$]Χ-@R޲]Q:EKSP634!wѓ]|~v̫SCX#|]ok4N&m[ma~iz,M䂯'vBd|{?ǯgߧN>nL8H36}G ii:U2kP7RWÃڂn,kpޥtT3HP<}JQv tPwQ+OlԘoz|SǒW. 1[\-b-Ar Zj Nl%M 'אgj/, _9NZth>ݾ\;d1)EYĤJt7@b!R(U8對.W#^|>%/-C̗ۗ@gI$R*zJ(! cLH Kۖy"d- D(G#u c ϱE|d%blt>)f<0%6hw9bj>AW΃ޡؓ_zTp}_b X+hy<@Y$=Λ-8^J Qk2B[{Pq \8k{1t XSz1{w66sW Pvofg7> +aKoV~[B+z[αf(T?K&kZիP%x7?czoo}wo\y>yQ γ{Vӵ^升uvn|{VKͶyCI^Lܶm;͌q6/8yr8![:hs$MExcEgr,1w4ccpGIBIBwtdiB J䜳2Ș*q R[TxJD zLNmp{TUfɷǎVoO.:3g,iq|ЫT' Y92(-$%fJ6MFPAtl%$4le)$eLn{{Vl7T:viʚɗ-M_",hA׹DH*KR;41KJA::%eSA÷{1* dBc;(㒨䡱n&vNs_v3x׆^F="{J$ JgŠNBF)4a:!QB1cR[1EpPV"ФɳɵJ8JE⠚%ܢ{v.=* ɤ.HNtsb-F%XeJ%ztݦx7W_`Wp_^Luvhd$CEK0(W;F&K5T4y}w{2^e.t~"/%Pο駶r =ba-nF䠜ӹMRA D\&cHط={A/mhBPC!z@e1`W :Ho+=(+5俄A[(&DR61`95op@u_1$Ow=et" 1R]Yy[ubW-$QVkpfDN[/ufC/+QKW#BdPDwcRU'=GK "$( 8ʘŜ!rKi|.֪F&vD+o: ώ7;zzk":Kޭa<)}3Tjr#Y%-[_޾=89?yuZ{uS =*K^+צٯPM־pyM}Ùvy~qsaxWœǧez|ٻ6d2vU<ۻ ^ki!i3H|duPH$1`Y{US|m;Y 6jG?>^Q!. aegGxuèàvkYew,)'/(&ѷBN/UG]NrݨP<uV]y)#u`dd룏fʠz_r08J<0O6pHw&Iʈ۬k k")  ق"Cd1p 1 -%(8iv9ݜYۋPqٗ~vR(:>hR41۞u;B-y>Ղ|ٍ~ݫƟƓ/5K2/feǟ9G:om `={~ֆrИƌ >QE_mU іȐԦXR=vZosYMhh%K1Pb.F뱤H֠ Y[LQfu @:w}$J +ֹ(X(eX`bTu_g\c ݜ[\uCؖ)TqU89N0hW]ox|lfW/y>򃟽;*C>sޟLݍ``6<滐njy.=NwYY.ZʌLYerdYg"wg-h=_|?d}!V$tqiڹ|廥ϝ8xxkuB۝Wd0ř_z/jyv0/GMdz} ^`|4O(yמ8Nqmr4=&`:~|uRV{q574}/Y&X8)E&B@("{Լ±'e^ͫbw`_zzu1 V?c ufȼ~.7h1&P D Y y-xNzG+VTh Hyf0 m] "٢)Yoj @ e*6I& ϵTz5I6f-ThZc2  ec)628)-X-vT+r:t -R'UKy65vLKah5]AxªA*\))0A{1QI! 
DC/I^^!r3zFV9RtX UM%Ā mEa$2NHf, l`"*l*H PΖ;ݱuF5l*ݪ^¨|z[jx-[Y'tE1{Å2B TbdMQ+ٲ.Ec0;ABFk, Z .J$RcƬ(2g(zԊmbIo@A%" |!Y\(<s$*%9+NV5'I#`_k F96L9k'@F&eCYO\u̦<"L[m!ǟ~ѳmX‡ ߲C5C*,`PꨓI'3U1k()&^<c0m8a:GF?zė4%\B2 /rYq} 7 6a6(y3iq7"lͨU@ɛQ [5o(@[z+i(]&gn5Uj@>a@e%pMRKGPf+s>`ku3]rGk WF!.A,ӥHli_t:qW.ISKM1uykȬS緥bV]c- Yd=_ +?uFcE#23" xjDD.ny\ t;Ϭ9]FFɚ6o,K0E$HE$h%{wY-Mo_;K'̘3(,$~qf=h -7] gCǑ;wf;A@g/} Ks4 Rmqq$֧&ՠ|W'dK:)a.QglzW 1\ %e+LAS*`dDnM(Kj[9g˺N$EI9 3*Pk 9W8:U'd4V6#翎UdMA(,:$i#*B9"3YZ IEA!tkt f#e&KD`"t:ԘM e$;Y N銜kMCmx⒢QC*Z$`^(h=>*Z ]/XRҡd*`_P>,U:4dK(L£%9@L‡DL"[/N`i}pxm1/ߍILx2me  ED!B(DVɧ(]B40hQA/c[$c'G~'*sڪKA;pC@e|۳aq&zqJM0@y3K-*5hXXJi%aJۈ-ihSH2)Q)'.ZʠW6A"*]wwF&։1Ɵ/ ~tU58d+t?Cfӟpy;΅TN k/qE3%ɑtNIa]^x;z{$>lZ?욾\AW3>bOY:޾ِ}cN9J5;SNv2S˻ҶSTJחS~唥Ee\ޜ,vK9=.YmڹwmK/}C}vm9%X?Sbk9sC01ivT*WLm)'Z>B' rU T_ƦhE}L䈸ȣ"zCK'0I}kRDNFKaZlC6 #AJQۖ'-Kmx.F^L۰a<CDRF'B*KvTx+8 1+8/)3h:8o;';x>"w'3);/! 7b+Ftn\~>]6.dG}l\ .E6iG7?;n /PoNڃ/~7?go?~i}t1ux}UCUE7_N|s6~dM{u5~-ܥ~ &{9-_=֊Hd,&nY\x P"n(6?>~g9MU<&]pyy pMtÏĨGVjM>ctcGs&@k3$$d¦5@.)d3 L2RPʳ19@(0E[UqC+.$}>3I>%6ƘTdH)@oVo]FdލlpʯF\Mqj:=5`a;2"nq5'n/a,L R ̀Նq~3ޡ٤6{҆vnDD~ΏX?\Zf^7( Ԗ1jM@Fss.R)KbIm%1(A__RKx-"|s+;RB S$8F)?㨉܁sb#՞Vc%z]̋(^~[M:[wtѫ;Ƴ2?Ѣ^xX 2R.'J3G*ZԇgnT3Ϲ >'V sMYsg+}G<߼@GEoE@H M \ha=M.+UE%v %)WhU:*yɴe&x JrS`5#ŜF-k׆V)촄! ^*L $UHQ+Nz ۲FCTKJ/\XR%k%T!(8 > Qu`&@bqe%(r!EWy(B% )bcĂY̘{h{f+ɂ;/`.lz+'tS tSݔIaյPvg:ܧq(;>ʏဓIr(l9gfJȻsz:.G~JAOp")I 2PH+JD'#R\2q+Z '#*Caj3jP@{ C豉 `DmFN 箔hdqΐ `J%u t$DHiJ@"{JOeodh7]_dr4< aI鋋2񲘟s,P)hs~F ~2 LW˹aqSl8,BQ9EEyd1ޤ󣨸9?PfY=-ބtU?L@lK'@n0T qo诹F<a~5W?qݮsSUOWb񛦇GzȽO' Z DXR]飶ěU`̙Rm#ckGtְ5x,PtXxdR+b4-'}:-Fɯf4 gTi+Ji-H+Rm(60Y1QV>4$cx&`^e) BDN5r#Ny*w<U˨:4e4%h"ڀBPDST?v)a0L#a .|I☁ !f`ET/¾H P{Ҙ5rai70 "ED2";Dܩp0\([e5!-!jQϰqUŤ=FHH(QHLa<XobA% 9 %fXA9WJm,bD.5.y$.2..2b;-q\je`*GԤI12Li $i) qc񐶌qxxk1IlVaOH8 A4ͯ rY4W$zbw,߂B6+ߓW' ھr1ir_pYY7$*־T45JP̌ _XWjh>oU.ȣTȣ]TyO0N>,a@r fw))0^H 8+:mqkB:Fa1*O.hM A鶜\51ISlޗ:|=K04J$s jCnݾhJhV\`JEvY=M+Navt*'~:Nۙήoxl<{y>D ^<18:X Co |4 ~Lp am=`hN|z[ƽ#>BA;t JS?MâJ1iejU7{oG3YE]nldak U cJ#84 OHppE[,(l#TKe:{*WyPO >|m4K,i<d /^蛆l؀h W (y/aIzs,Yq/G vGOs0\=osZk5m'im i )ki! 4 )6Hhl4:WL.T9 DydʸH@kz||t`TX!!DTh浱9ixL_K(`G,S48jFr 8M(( Ȣ`arbcHZȵ&9ޡgXOwEsilzώEg5i=Pi^Ĕc:U0y(LI߃M|5 [P>R.K<LߟFx}k8MO~;>~~O'^IKz3N-EavQH&ҷd68ps8sGJ\i`C iWg)[eաˀ2u]>cyZ'`y7ޖxX\A=Pެ\ N'in8^}-N%eԇBMy{ lW:SQd.䯻L# e{;O>#u#lδ⹡ZJ0E R܅ŏ4,f]Xa1yhDp!DC0sM8(9uJ3Qʄku4D{Ql=qʽ3ø)amޝĹ>,K\X U9 NlDQMGһo.29kӅ[ H_=6>'}7o[:ٸJT >0;\QJdO05vu3޹fٔ 'ָo[P e{FK*Ljd<)V2hA #(H5&"}BUv5h׊k\twhkkgimLDD&p TQ͍QmQ'&6x'suq Z.aU aPЭ0| YXBųfrɴ0B hP}JPG9낚5Lxrs-,;bndިyB:a[°28YV*lA @햨-@ز8m%[O^6>'gQHCӊ s%pQdb.WHQf6@!"pL_0FuB1RJ[cq޷\g%K3"اL}V1`Hn؈b΂wDÒgiو[z#kBuu,`o|5چ_W_vu^~ՑrRD7[uKX=nM>%^T&Yy ~9.I|=.dZdOgBjaQ?( MѷyPW\a/{}nF/mL̹`1X>ZMAuoa:K=ncfx Uokr0?PWp_A8WoDx?dC y:N妪me,:p-?GQٺHS&O3^%<@rFϵ,wJÃ&{ 2Q69eS<`s-ʠ0 JFwM̢zZ/k 7v^qN?akTڕl@PoR- ʳ[i[R!rFg `hn)1H4ZXySگ OK=D ei\ ]ZPx)X $7mI=?dESr_zp#$xʣBa:Fj ʁm{mA)Ti&IPWtgopu7łC6Wk%]|sѦc46!dTa3CCy ͱuLGh.,O'.Rm"qq=XFbAFJ HX ȹ[%ŤKP *1۫qn.^\xր_IapA *BA hQibyx:994EA*8I 1H0#v*.MYi %D0̺DZ+=ӐyүS@:˭Z;/S$yF a#H8e>b-a"80Og1Gci?'¦?O*7/$` lz2??է/w:xAF:~Sf?~5wlF.8|/:ׯ3Tśb7/ Z)0;P*il_ӳO*LzX|d)PV9Fpuߗ0 OgWKd}=6B9JC@g-Q46 ?eN'nTװ`.hX1#hUߔ*`z~Y"raSO7)jb_ތF o1P"n~N;_n\iuC3e"TbU"WÆk|V56a+Pu=XbpVWL5RMԴD _T'/,.qki7鳐vHBgHw#= mDPichM@Fss.R)KbDPj%?ݱ%ZzB!#pޯ1tm5XJ&ޗ^]Z-u2.n@߂qnu]C+d瓺u[Z-4uu'g + 8RH̙g.7Iwh`Z:gw{3;ΙN;Ҽ~cּNPUZ!FvNk# 63K[X:bE԰3=  neˡ=<䐤XHMLc1>8;x)R+njk\U놭ѠfwGz $Iz!#_SW"M O旾I. 
S< TYDR\sǒ'[˜֚8(AKz␲6j+HXGxIb&A aJ;f2Rʃp-ȉq@#ZĹ"d,=rK!R#VQdh0Npv]րh] v3N8i~BLd-QPo?!}l $/-htd7( Վ٠|~C/9-tmi۠*"ps->ν98j.nU?L14ȢZAh'z"'˅ p'$.Bx]Jhp/ 80!'=l/Z;,6;$yR!Vi)-[h)Eu)pmH xey瀻nɾEdPl3`C8d8r!4AM9(#L&!fbܗAz̒hu@$!"#qG|e r 4'<xaj~(xe4d ))wh`I )Q+E&xd$Wjq> J /ۺ|#UW6Cq{s=cS qB:$տ_t '\.L ԊkS UmQIȜ䗗E AV 5hNdY5Ȕ E.XO#HOrjaxl8!F-@.WѶIPDR% #:j ;9JJ./F%t&r(3s)^*Aݲ}Uon7G1b ByF+>Mmz֞O L©hTDW kd ܚ(aQ3E3g |G 1I $T 2(FPɽB'X8&`6PӰ8 TzD&ZC5uAybPV2td)r5^=srq[q[aFo_࿌;" .@LW ZcUj\EyOG`+3(|񵠊FTe(Fx[:lmuu'E;=,ݤܛUAm3+5<^z4~}nqE+=Wr^%6 L"&N r2HM A.@"v+Ծoj7C'-X[uv7\;y.I}w\6:8Dž\"a"X2ǤXn4A Up֣/7Kg|=T6jL'2a7a$dž恛Xxtm/3YQۀ?5ШᢆDQ|jqϥ茚1j=;o%KUL4%1M@crPA@|<0Kȼ?cwxs1\ IYe% mp P&)_˓LyHc8 >[TWH!+ GęGՎJ,Ȓ0#v&^lQ7̙;`Vw[[ .Eɳf6[-uCٮOdjyR!ȁ[\X@RiS󵈜#'_'ǩ̈́(]gR PٌIYZBP3K 1*TBơTS*u*Ȅʺ rBҴ+uҷw7}oy-kw*{QH1$IJk%) = Մ"bJX[xU1~eKcƓ0Wq\ϴLkɓG:F6T@b֠(j8>C]qЌOVPY9B֞i~v՞kqSyx;Ϳxw$Ms~[nWs{AKmlDM;rrqFy$)< #862 g0bAr1ѓ՘tI!Qmmj%1t9$Gϟco=JU#Z =r o _ۛFg_>|ޞ_ߟ e;-j? LGpL/""?7w?}h!J -Fz%+ ˸#o(MV6m/uk@~}y;&miE-f#of~6 B 獵w: UNj<4" l>BݧWm6:w7پp;IjIQsΌZ1MCѠmp!gx!hJ6>7 1a!{^mF+&QgcI(xծ#!:.@ @Sna _ww(+zγVu;Oc8x⥄p{~FV\WqZ N m9`Um!:uBBL!M6D*(L}og๞CwC'c B{ÿ>p5VxIh<ӫT]lm +[$ S5ɳdOkӤ,z4Mt,01 YcK<wZS ՔN, yInOpqWlh`3R ʣw&[ l>y}!6"S*WyE_d޶tMUvY7E8V ]m<@T2քM)gcUFlSlE<衢'bzቘzK0]]®ݦ=vw6Up|[{xYJ4yBa0 Cu gS.(4ڠṛ< Vq|`.H# Bi5@cH:&g"w1x|0GЏ//oǧ&>7$RRA}d9RZ<ȒY.@I *Xyy5pRC4^< =)ң|o3wm^\RsYo r` 0CC 1"SݖS1J;y>=".э ŘB0R9f[Ko :D<CDqhMK?AQ|,OjS$Y:֗'EqRt$LN1Swc 1Z=:=<`!v!)U6 >ϾwdAg*1rj$AcHcXZT%, r 6m`T#GhG !PzDM:IInC=ڀ*kdc?odo]*Ue>~ɖF=ICCĦ]&һ!/oDgn !`hK2[tƪ?b0CZ8iFrfN;l 8i(?X:mɠ)^0f.k/$5!ve::gyNC.y&Th c2 q| |r2wG$l}SK;Xyn|9w&o-lM2lJ%mMC2'%Bp![˫?jk!-&xp?ćև6t?@BOHMflVpn./}Zu滗Vw\WcBK73SXuXqIjFDB=j|>8BIƦ9wA9a#A1e̅8䀤81D1>Iy{y}5kv=w_Oe՟~_K>i%z3zA#7[yB3׹Vr -s 1f [haHt%] k&z]WB wzrljk.]Mvάi&̫ịiNv]=ꁼ&ӐkFWk[ѕ"֮+t]WKԕQ_Kѕvt%،RJ+TڣnHW] .Q+b ֐j9*Rtt3\m[ѕu%u]-PWN5b{⪱?)4fClMXOx􊱔=LA_/ߝ\_l^p||'*!){ x0[ޣ`n/U~r;?f(wϟrŲ>'+淓.?ùɖr*^M贯?+M|Ot\0 (Snx+|w'7_]Oj3f^h?yN{?JsJ Uy\ɦYZW{4MKϼ70 LTweraAq(|TF-Wkο_^^?:[ëxuaO:mCe`  nfj~ikV(m-Q)אѕ:hEWU@uEcBhHW U;S  u%muUoOu5 nY$Jr.] ]WϭzBKb`U3ܣ>KWBGJ(mZCCb`] Vt%sjFYۭlKѕ6+Zѕj] ]WGԤѕb3Av]  5+ܻܣ^u%[/+]մ,ƮVʟ(wfvJƠ3 b&ۆ/˛q Fz[/+o$IW?Ra`@ҟg,VQ1 #<"cNUakkv}~˶{ ݲ={ WW>WkӽT=4^BnEШdhh&8 C;7WVFrBOJo<-q$3Ӑ<Jp%yIW(MЕznՃU$~8]1p8½CJp +v] em;5t]EWj-EWLDWThEWBmJuD]-J5+Ƶs/FUu]EWhq+] nPJh S]W O-EW liFWҳy-jS"u%K2)UqźNCu5A:ZW 7FB v|n)n`` Df\ i'Ӟ0gqw;ߐ8̞rLJ(Cu<(T J};374̋iAAJ]W yCMEWQЌ|+ZS#5ertwz30t5 }j4Z?-(QUた+uܪOFAC`kѕ:ۊ6uŔX`(Ҳ5+&Ռ+PҘɆѐ[t%so2v ZQ]-QW<ٖ+ |m q 4+E)-ґSw+K{WU3ڹM4GWKԕSgԸҤsUVK[+ߗ4h].c,5fц^-5}\|8n9W| gFsMhuK#9HNp?J-q$ɵXO7+={JhJr9rkWrt 銁 ۙ.s'5B`w]cX'MM3j-ʹJTeI ]WϭzP JjFWk\jEWLJ0Tu#ՒJp+7URj22 Jҕӊ6ԯ+Z'Ր84+mg0ȴuJ]W ԕ %] pCvn -abJ+n|}gqY}7h vuַA;{xܘ>ៜ=^g6w+o-'o7x[0+AkEWBߕʭu]WUВض3Hp]36T?H( +f ph&2:Jh]WBXt4 VMf\p;4hlN+znՃ Ԑpft%] -u%v]-PW8ZP7+] mеJ()t]-PWҾ!] k'\Lt%Pv]-RW[NWѕ+~0([쮫ʢ ]MJpɴ+zZJ ]W ԕS`f׸Aˤl'Ѯ|iښmR3Ҡu͌VFrBkɭ)}%UFt%9b7q]WGUؖ1064SpM3VXO(Z"YӐڙI*c=GWBg!][usj"y08ͫa&RR]+h]Wwv]=9jHWӌWVt%XbuvM銁Jp+)DJ㺮+㬶-  29sDZ^WBjW)] 0ftůno&Rv[)Z,2 {W] L_&R[ԕ=VԗJD׾&5H]^L6%SuþaLWʵ6P~yaD:4+Ԍxծ+,IqtHShr Jpm3W] %G^K=d6Z5+<!3%Vp>NXgfn";ԼFK3MמDUeӵajIw+,чJ嘓c;BJ&{4F.UMo7M?7}Р(iY˝"w-C?28\HWz'߭/Wopbus{y͍@ 9w=b4*IDowxϊW1ħ>9M&͟}~b>qw޷poZ[)7>+I}Mכ޾#WՇwlWV/wk#VQ~"H3NΆBx^7><O˼R~2AQ^!qA>?IyLo*_LW_T[^Q[w6PR4F0ph!@ Arvt!K Bw|7/Ip uys~[nϊuy:#$F K@}07X5`I+l)XYIq@ŨL>1*feV[!딍 G鹟uᩓ Vm~^q)geT"sM@{n,|;u~%'.sD>!IZJtRJ\Kfȉ+bd#1CH&{Om0D6t݇R=ՖڔmPmadfHd `,JD'in1ܙGpks3$=y&UiQ9@JnHyI%?QdȞAgZ{8_!i/6c%X,rw| IA|]!Eұ)J آSfL t8w 0iן,4)hR!TT2d>劉FVKk޵,ٚ,0Jzxx/dp" AuU "=Iu./b,U6gڲd<%C(d[5v^Ob6|:o {VeDM*1ڜ1<'*JI`Ef-H2{gQG%satc5H؉2VM`r9¯P#Z)dE<6ؾ fJ¦ VCIq VhrЗ Teʦ JE>I+u1뜵'Q\2c I&l.-6JR sc`䒳"ac9V@`&Y)dS(! 
Q ch+ե$PT":˄BѸ i!ސ ," VTTPtB[Z y9mxAY]+( 4 <(EJ<Īɖ>2h y27p,jB!HQN]qah P i\Gg*){u8ܟj0EiژD>r@E>{JC.((AE!8.H1x7%؆qo7WHƊHT:h* \9zd, |DEx*dM'(Mi/@5tUE`^Ou J+h#;6B͐jPo|ͩ~W(c)hzk<(P, Lh44QD&+eB>7з X,xN`&BC\9N ufS.K@BAN[*MZl{R AEMgdD] QQEWPp4Ȓp (Ey@PRAPSJhHflB)vNeFT'U]%ٗ56-id^>ѿթJ_JA pBi5%Y* N |u5ft菅oVMxXȠ֙pG~rDAńE~ŬQ@q|cL:C:IFDmOL0^ /ϻk?[Uд-YwF<]`漄i>I| x/ /i@AhKQr]e =)hyE 0X|Ѥ`B'ą!()DM+ʫ2l|RT+Wp,9Ѵ<ЯT4/> ×Td2jnu›!q*pt/]TBNU~T}%*U U l+ɒ3/$Sa|;yc V@P0i"hAˤ Z G̀z"=peT&hJ7a#ȃ=>dN p:q˾ %!.IR[1xx pip3;iSh\CC,2c4&oBG!:.Xzꢀ+9"[ U\2yS{A{G[FHz+% ՠ, n-nZ,V륀tNXq ACĶ8U%gМǽלڥFs `o_iL瓭_`s_C̾%zl4lH;Nj5Z,~Opyݳ`Bw anq6{XpWT*Ly O1/+\\KGZ],^w95"4.h6O,zWW78|50~l dLlcm ݙg$z` ^;Rv=E' G@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; u!=&'{s:\#UGc銔 k4tʣyEGS,:Jbu(?GDW/|*\EAteF[c+ ~tetrEw-i-㼤1/j}{d1b@[ܽ)|\kUͿ 8˺ޖɃNƸ/t2ޜa7e2axY~/tba,-VW5k H-YOwܹf?mG$&??\hbZbC Y CfIsgPBn'9gdNih(Cp콊؂M!ϓ<$ߣlgٴJ:TIm,L Tr?hanTo7uYtl(|Rk$k$I`8MQF->5g{4~ݘ+ j/d=>tflqy:k%E3aΌ>z~ofWw!| 3!1g=lTQ>}V}5lph.u?(Ot[:\:e:J' ]y)#+6$:\s4k':Z骣"]u<?:hQVWR8 !/W8>j^OnW!E]#sGJgĹ w$^]GАzod6' r>?ǀ8KIm4/1oBu̯pV::syJ1μ}7Űfz]W_w󲳿}5jׁת TvLqkNӴf~}q3i~:WׇYFXq6Zk{ BSgͤ{0}?n2|XR+RTjuZ Ӽ+- %b]&Sre ]z8ف;ۯ Ek.yo{?rT;[!,6/-׌7FDQ/w9$͇}u'Ffqqw0{_x=;ug,{#z<,o/yX{0ggRgRn~Y_3c֬&?R 9iP#X K?%8'I@4ח̸Xʡ|}>7r1Q\(6ϭN?KLTU\ ۭ(w%q>o|H|B ~jO}h(٤>MtIaʘɚ twmHep6Y ,&Xf/E[YH쯿bŒ-ɲBr$[d5Z"`ɯ.V%֨Y%VJ<AI7cՊdXE]RH*(QFBȭj4߷|akCGW]Hd-%SU&t2 8p29 Eop˧Wc,JcJFriʦqY#l&MA)U9HϱMJ`&&7a<ş!ds` J{hS Bn,/8Ogk+ 'z+xZ~$_@:O~a$Hv!E+͇c|=e퓂_O꼼of̖ N#(w=]vzlnqp=]f(ǟ0BoUkNǟF؏;oDoF=\f9m_iRGTrsӷ.kϦWί-Nin܆}Jp"CCw7twfvi8]pnh .\[ovk/8ةhG{X\yr+yD7X9Q" z}RG=u <_ܺ}#շ֞b@cNN1#ʖ_œf:7ᖐ`[nJm5Eڞ)&đ6f bi\}ѩBN\J'FRFF'n(;mPxn$Kҕ掇hEN^)3VkǥN$p1R6YG3RMٷ#5_o~`/O6$FNk({Ex+ 祀s IuJHsUCQ5VaCr,ݸ1챨'XHu,aza 0p^?x42b!Yv!p!Dm,֠.%@`,B;B"pzF6C`%3HYrAc)#jSvJFzεTmXM=c=RVӌC} M L 2>,I&vCA2=y`rΠ́O6X\xBs-J1tC̊=!)۔*pfv \!\F =v5qa4L+Xv58kCe .B0Թ˨ʑQRtd)Ĥ:7܃dyyYSrI.3!)A:&c`rT7kxTa3!$Z,Iei <)5q|u<'Rm\M=+.7Hҏ M.4rw[o;G~ʥe0HF7d33"JbsAG^r*FwD]@tYun1)' ԣl&@DF Dm d 85 7qyƂ UFh'hĄdH >ʐŸi(y($)jYF3e4ǽ /y5(9Sl7w'x iV5^ ;W Q=|5O%ZMA-x3ܮL{9z1Pj(EZ`^ HtW VCZ)骯#,CG~֦"kHs65d0ʺ)ehwq>F0|N\2)lX,;]uk؇/MHIczѻv9W7_7*[E QQ:EahGB2^vWJ\2 LQ &9%4 O3}3vrZg2f™@hx@0zW3U3gʤbrJ> Y{ŔK!dƻsŵ,[X9^ġ8򐸎n<kMޅ)D+o Z<3~@˫[y;9W.ҕJ0h}mjS4W8Ҷ`nQtxZ>YŻ!9eQU^UP$T baMބWpl _n=xezݔzw? W/r\o?P?njus]R_kh^JG|'!He| 0FD:r)s&iǛ= W#נLuqy #8cSʹkk })-MA>O.tRctt~QjՑ!5yA2{t$sGOMV:-gIxe2 Q  x2Ԡ%V} qJ* ْfxgB\@ä.qe1*H0fzK9ɬ3h ˚3G|p>Ty*KC*%ʑqB%,F{A6 \+LQU,8u '\}\cS:-7=6R {x,O^U9T9^]a?49$\ZH#R )Uf9]9RQHe܌IE)h-‚"X00|ڧsѺ`dKT/sқv#N^xu0:?ZZ4<*^vhh'e(rV&%=pČ2rdMKsgڦ~:k&Ea k̑ʄ)W$K MJ;`G!cqr8Y. 
fcG|hy LѡӒ%i"mJ0eh# :RU4?wyZ-Ok)eA;i 8Ц2zkhyB$h "DptU!5C23S*t ffl!⿽)IϿ5,1LZzG<oO27ˡϤ$R' q;9FJ.㉟L2 >L/Opſ`iV.p94Mfp[qg~R(L{Kx~mT?Ϯ^zz1*8/ (RK?̃llە ik.w?+6cI=]nFlFϒmfUYޑ!&ZE#XFb'}/ګ`{]<^=9mDž5_FXʽ >'7c(ҎF g˽ؿ u/{wϾpߞ;uD#0XCmA//`E/OwTE4ѵ5z>k>~ߖrnm H?}7L'$GYl5gǯ,aWHl2Y>MRTΚ[u!@0 mge1];9:xOO^9OҌ;%Z)kPѬ'pȉ 1K!^m3|x8,\^Z?1p%!3:̃sAdebABƴ Ry˸w,e- w:Uu&8:*tت@p~՝oC<`Uuy(@*$0Se+K2QثL)L*BIGhmyѼXy:(ED<}0 rk%jT"X7Uqbbu%dh%/Db)9JH63*1nt̏?ՙؘ?{Ƒl Of~Uw5|8E&XXB?%%);}D=PH#5왩:]Su\ yNzJ2ߏ\/?(n1ՅS a"Es[sVKϯyѶYt_ҴR7QZb`u%ew^T,^uW=W|ڕw* Vu(8tKW"`f9x12ښCN/e'fn/fNn]^Ͽ[~`Ⱦ1Voh5`ATyąc5ХK6>|M0AչR,+鿒8 ?+ömwl[LP>zX)͕I&A1J`.K}@[H1i"E0;&g_j2|Z q닭hCoCx1N|4[h:|rѿдؘ8C)e&kEXj S2)ޫs2/e̓[NH 7ѫdiD,:,rXpggE)y 1|UbT'-W*'YB2yB m'EN$Y +0cY!WP1̘Aifʆ1qhgW+ AQ})kN&` }wr&.=fEb I9Ź嚥4OE@}ޱ[W>h)"c DBS$ ,92lss|Oo8i~s#B<Ddeg] J)re2B퀣+ntp#lv=;MNkA -xLgYYh䅚Ǔfҭ&A;kknEd+:s=پ_c*PテoE2 U)VsdQsQFe $a$ D!h<33g ɗ}4֡AOgeb nʯ^8 !rsJHMvlJe+8/o}hCp4oAI\YX)Š*+ $BU1{m ms-%:[b[9w$ i'}ra> _9}qPӿ6nޯ~fՐFTJ|B+ A#ixY2X&8:D "]?^0UBn3i匷K$BA&SlUP*;UBG!@lX+~'@#1>*Ь`0 ѷO4ZVc@b:{VqrExR#YEJ5a/] )qH ]<x:?lyd/9 빳 |%B9-ْsY!vw݅aK-}8ǫ#G뭞!,&2&" L .I p@48E<7f o0֋фPۄStd7 H KMx1Yf1͟zqhEmRID[t!n%iL.Utn/hQA;9A/h¥^M(S$H̅h*$[*o`wd2}&6 MN5YfRzaߦڦ_|3B7a~eW]|qxh3bY_'^m8;_8g_߾}g^ѫQ\m,qsyt.ٮ\d>ݻR/d2oJlYcK.2R~>{q0y[ yN&ÒO%RoWw2~rO:s.Mo sˁ!9;G\{d鼯\Osvն?i4yG穕p5zέ6 ԇئߣ<0&TV Թ̗,0C"k %wFs=W ϓ2?]??]< () AmL\o(l\A fa+dnc݂"+\_hc pF{8>FiptCRxIxTϬ7#8. =YAm mDHyVM}L.T`uX4?ʽ/*{]خjŨf_JVL7폍(vȀFyKebt%] zJe䴈hUgc{dc|ֶ2O7y[eŠn9pGˁ@NiQ0Xp֚&`E\aH MuM^`0 9*WEJ\@Ҍ3qOa]pޛQ@E%OGnŬ9soPq^qq֗xJq~(Z7J E\hM"Q7): FKaU=pUU-pUpU\DBɻ"P"l"r~UH-"Ho\Akઈ[ i%w\=w|a /n;#\_}R\{ꡏ$[W$.ڻ~x*R +BY"" ֮".fォ"徽IJ-[W$5pUIUUW/ZWE`4+k \i9;\)^tpr `JH`]qWEZkWEJd\@Ҍ!-Ƴ 2>V{R*j> 7.0lsVD&|Dg W]YoI+bf]rI`43؇^È,)KR=F,dE%Ů2"2vּ͛yt83e<&p~\ظh57Y$(bVM csO.IW>:űrcأpwfSKs)o:8)1R?>j۽*o5duN 4&qkI 6cHČSJk@fc>3,.ޚB8Yfyy7 ES֪T|otF%Ù)9A0sNYeQxg{Vνg)ۻ8߻F1rG8~WZni]"dm2%>jL L\L Xa&jN_]*=,2Wf[U58_7zn(1o?_-|"E3Rz,!Μ$591@'e;'8Į\'UwYk]RwVҶesQMbۺ;ۮn㮺u`(ni۞ chFL aM8J3iKsDqlPt@]O'ѻ{v^JC_mok ڭKzM[fJAo+'3,3R $8Gz%iUc)B1*e}LeN*a2F A&IAujܳch}]NIU'ԣvv4gZs+C:Rmy>+?HX1^U LL'%sl(Pxy0tP^QRdF.@&@kklSAYoΑVő6:k__":,/MGw'g ֥7k'U' 9`*A 5{a )qKqxx:?ly6^rr*i2~NQkL(1rb!,bEY?18 iJ_zg=ostOx}uG_+ dSM lp6~sL'aύ;Lr4׽Q9kbxxTr&,%'+h+?O03ZhB!u⹵ܻKVgcxU}aE~ȧjxrl1 WS4*zr{Kc)H SV6cCHq`x9my+0(!y[n i 7c|v|Ӭ0i-0N{H}O B2IKled".MrLsI4`fНѹ1z4l431ǻLwKQ۲^<2I^8x@Q{dg2'Kl&0k(F?h_^Sסa"j0:͡>E^̳ uLQӆO>Ia9.Hӂ)B%[S2Zg'dؠJd6Y6cg69k0Á%PF;"cFsVYX#NXI>I:Ț;ЫzC~Ҽu)=mxSө}3L پ)9dzcg9k׋8vCq}tۚ#}P*џv9Omި_+'WYNlfQYҨ,ϒ& aUh0Eِ!H~Gէcs}9\\Thٞl{ApUP. GIUMvt+ã%׭`Xv<VxFIref%%)1G'O/w.QƠBܒM|e@5rQ }%o(n o,eg.T쒲֪-{ w.f[N;z"ȩͦܧNi!3P:-Tjѵ:[: fL6}Pv6clA;)=(NRv^ ^F]LJYAڻ[7щ,alzqhӨ7qE"{=n~rsNm~n^ގW-yocSy,_!I̦f~6_(^ +}k/^oca\J; i,@-;Bԋ!~`\8l3N*笙cJ̬ҬQx6hH Tto:8`N' Zo8!]0hVJZ;.t"\|)ۜrXda0NV2)aJFg 2p8f%8ϨAJ)LB(=L*\Ҡs,$WEl 2+z (^HQ`Sd7*Zd1pY(>!e,TFjGl? TPwPCN֒fO)ۆ- FOLdV$t1s"=&C/,d yaf1`D )QtLɬ# dT'X[~U b58]eDt"vSd7JI{NƘ&KΑ6XdTUk(uۜ8c*"%i%rI>J&neȒh\Cُmz􄋳USuVqQX.l"kK^R $I9C1ˑD(,9.Oqǡx(+< a{z,"k^o ]Q(Q2qΖxgNT*URlw715V+Z#[[@p|EB: YSJ.3>*tLNY!'ux5vN[ F9`h33"HbsD# H CgLA{K&E7^d2ٌ1B:Ix+ H5r* Z|R%C^Fh>-SL$Hѳ !q1KH 1+E`2eg@ԠPs7 .H7:UBN݆L4O @yu|sOSҒ1.ە^Wl_#S{Zq%\`xI-a Hf@XyZ:݃h&s 6&1 D'3Q241{2$?{WƑ /ۑŀs`7Ag%P]-qM )o _%DcrNtu=USE(uH::GZgB 1J'tV%w!!dIG*a2}FN"w8d[g?ueO}q1g7moݽtG .")T| >sD@rYf I}(^ymmMKt{+36 E5s\P. NFփ+*TsV}U*3fEH4C븓,>%M!} .8,8iG~IѲ(cye`-~%OwAnHQZ2 :Q`1, ˝'\@3itrC>/v 92ޯMbm4{Qf2ϣ>Bf=٬/&8 TIb^!QӲ4l*ϑU–2F'˳mxAOG+rKIJ`&&7aoŲQКL̥}5+k >P%ޜ_p 7 3XsqDg_ ]CZCLLZNGI"9ZJ. 
SL{\8_!mlp}eFVX =U/%E/zܴKrC?۫GXUɈT~_Aqk_\#uoK1}~ήᅵ]5goB{|jPu A*  *yaLً ty,Sr&ط$|9 zx6@۹0OX܅<*yXȏh"nԏ|UͫBՏ;4}z/T]~:KZJ;R,ydI i#6>+}7_WxOSJԐ>(>Ȓ驫}m&y>-x:h1Nff]zbT 0{Prs~1Nf[Qc(w䩕+l\&m&*Kڗdz$?םcjze5~ :Bd[P|<9-`>џ1RU9=K- V] ab0yfw-hJdg`9KB}dLA(=+X! g·(ާ~gn.M6^y.q'anekgN } $L %:cư9sΓbJ]%Veibf'WNxN裴 Eh#ȠҲ&JKCٵ҅ڞ&$sPG>8XW鋕B)IsrܔLmLɎk=&)%o@E9C0ARr(sh}0IgVёǥ?M;.a:^7:>XVdtn8=d)rV&%PF`18np1Q#SL/u`\13BE4) kPXe$$P+P$; ,i):2`pdž8 EGK^YwYAhy LA`eWG4%X2}*n?:W[Sqۀ6% q٠Z]E ms6 =z33`6AX@"x癷:d)y"Z]nN`o?摆-^Ew1,A 8S_H;Iy ?2L .Iϫ롆ّ:IF﯈'AuI>46SMq2)qII>Pܑ3P2Lp%g)0i|9&xz_Mu1 ̛ɘY-M_hB񨎎*ysa>;Aԃ?ÏwF2%7ߦ׫wׯ5dӏ*N,e:;ECή֜ 'dBp|RQCo垦gKx~T?7G8?~v])Tœ0'Jۥ -u'w7#itWGRzHofy'EL8{T!24/,ϷܜVz㨂muvFt^3NJg:Rִ|2r _W"F;k]fiE 7ý?;_軷߾|?7߽?G}CV40lA{XZD݇Vid[wZ6z:Ọۼ ^ScbڡǹmH.>3L'MGs֣lYXͦZk^}&5?m^^/W,/z q!Zzv;ے0hV~xط+4eVJႵF(hrSH 0: p:?-1Mg:G2I>Hq,eKNwuZyWM Z[Ͽ ĝ|7XU]ӤtԝCI}8h#GϬRJ7 0R7Y3|(b;s;,pcF]fi9pU 9#S!9pYJE0G! %[o2 ]2YWW/ 1.bR kb ?EHX^o4.H߽j7=}q84܀hBp~ݎ@|`29XkpЮ0\7z"e#44%6(hm#5lAeuґ yUVb e쿆\;f. csɲd")riA9-WIRC0\>+nF#X{m0!X+L涫ZxkbҼbRNǮ3J z|ࣃO1ɉslh{x)?6d"orX*=*GI85Du_#~8R']-SABiטKs>w 2/A>೰ǜS!vaN-h Tɬu<{~:>} T@wA2E肁pG 2X6f)gl.797\ 9#|. E2VRsY/@Kȼ)o~WeyziXٯ<˧,=RgDKj!S.\ȔM>)L+$SioZrO%)f2V x -+wSkGxȧGdh+"j!n>TJ8}4-d茊:\|tU.Q`uW/NzLJs Q4j0J70 >9sLNyFGz̩#QjD{IsoIН`!yPrP7YW+i:m˜M;X=3Lx`Po^Y?;/mRǕ INVT9Y)` D2 &||;%Ge{گ`uzſٻ6$WڀquIwX'XCW1E*$eG{g&(jdQ-q=UO?U]Km3})(A,5FF搓c]VDC:rgkcvf^Lӑܺ9R5>{dF¿o4Ɇ]? vԧM;R"? { l> +Ӟ< IN=NBwtW˜q&jl"-RKX 8Tgo{5J)z68}9_鲞O.~905Te/0W2UO\[bFU(5=GI>Ίv9[k75+!yͫ]z⩼y%C:Ϣ6A,q'^ΚV6rOfK)ҽp!07LUwWhty7z&׻W~ns~ӧ7n =t$(`9{M EScZH}CJ]i@Ўc$zӚ#k9$ H({, (er!KR{KjTK &;)`?d$JQtWɇv+۫KA-K1^‡5]j@W{JIz)]~lm>kqA}|۫}쒾PqRm=Lfa -e[+᭨@8R9e[hMG̶`^3~(Ed[`*DSOdFErIn@yY)+$'` 2jS3r[:y"Y*aF5zɥhw D`sP'4CYgUQ܊` 5^ORRq\,U1`'m@ZR᪐2%!2 Jg%sZ>yYHblDzR :cy c@~j3́WG&#_)8d?EJkf *IeJaI(͎;RN!qI ɹUCMj5Y@!~VT0P*zc 5qv(Ps:3-MqCcњBB¡ ?ҀwU)#g$V{nIdtccĄyMl.D"7Udhr{V hĘ%=)`@eb쀊38Z:L 62&Wi [ӌ}]1pG,3,xC$L̓P`iПL-B)9j <(蓬t*l.p]q%XfIz-Z!Qfa(ΞI6%& ^C4Fɷch?!en/xĹhW\̃ܖv2jcݦX:HeJNRdtjLG瘁xLhɾ r /:dkb%8FJfY :vĹkc*x(ؚ~싈eD"v(rBG̏S$LQtZZi-6X!PABCtٜIi:di=eHI C \ģ:;n8W9:L*핆XiW$~ktz\{I^ u *ΕZ`6+e8t `rKxTL:}P쫻1m,<=Id$0$P6 [ #A!H !ќ],܂5xJT8SxvY.g=qȴx,UG0۔5J/ ̫l u; Z!cR'#YKǔH5\Ҙu"9 Fgt#A!kH @'HPOt$M{!az5g)т>IzR/A `7 _HCޑD[ew9,ѯ 8MS;4N$/,S*1L.\H_.Iϫۡ:)?O2ē 7yO4Τ}&%;w3P2Lp%Gh) Gn2I~pu4ffdnY9X]`,y1 E^\RxE'urxzR( MY~1:?\LzPHW Wlhd=FׯǫU=&Gau MҺk%geWdI}ZfwKx~Tl|__rz6*9- af!̹?̃Y3.v%@B6.wo-)X͖@o|S3bc3)>UeyGpTcTI43=Y?OtNت`[wrSjChuRbin#aEѸ.`|9!ʽ >G7jpJ塚taasgw'߾o˿>?N{i46 ăEXQ p{m5M͛uh׳ߥ]f7{|avkskm@RtM1֫ee9kW/0+$6yS?#jqS*ky!CGZ&\`+\g*/OR[e2R hDь#rb}WVuͱ$:I;0bk>hU3zA(csSKpqgY.1.d[ie]砊ϿHޫ;N/ \U֝'ٺ#ltHCu ^ig}ga=u׵͈?;/jld9y`#ZFԀ >S 5C"BFbsCDy}MfX캺rnϊ~ So0*s@}:K:x ?ьSM@zߦAQ{swpiׅ*ܝKr8G.<0޵5m$Sr` 䢥)SɡWU*Y$,\"vUK\SJvt^)ښGBkm~`,$TTPeTh(8Jɀ6tn=oYe9Qr܋"I) Y[r '༤gDۜ; X.’Oo1}=TA6M+ގ/MV[y,W06pʈ 49ްgzD˄CV#( Ŕ:Nܒ*pJrIZ^dQ[Ű!ie ) ]rZY:6ĹވP׊kaꞀ1WeA׼k~Ѭ;~o]^YڈfqK>PQW$0RH.:J[HokAj5"v$w*볧!{u2;C.%G8<2b^>dOreĬw@,Al>ke(*T  ?vr3qǟՇoa;K6侀{tvYNzX߼κ6|JuTlCXJ&')NA( w8uL'no4Mƨ`lJBѵ9JԲ y0u̶VDX=LEnk|c[7yHe&:cXQ'+$D̾.[xV e'=1oԛGwqƚ8~LCnkc_"NFpDUGKz ˔-8>Gmb" _ʬ&Hmic._wFZ=!vIP Q1嘿A!f\ؿ/Vm 3|itw=. mF|_؜t| AD(u$B(Flu_6b[%G拾(R,6BVBk+r$C TEIK&{wB} -v^ҕY3(S06s' 灠ǍwPR 2stiuJd 1D[5AɿS[)鉹pHHAȉjp&ƔTC2ES$D+=9ғhKAʣ51e O`D* x`6>_6$egd? 
K2S:VUĹނ'`dzdQv-{Փ_E,=h+>|:Wy4ݎN?nUZ혥BݎnZ[{lI)ٞʦ@ 6n:)136~g rϚ\ D9Ff‚tQh}LZ!v*@(v5)Yr@%02 ƒ5*ynBɮޮĹ>]3_N'a`<|¤Y [JHΠ!it #*(GJA"K1ِT>n׈ QsrJS|^&+3D Q)^ԜXK Q##vJEDTR8VW7Nwޞju};6_s\f|^t>yݿPRg4=Ny rkj+HG,h$9-xheDd]SN{m ֲ1"CKU -$ȱ.ʅ)hF=]贓>A[fc4 W#%zչÖg;uca`}iy+omp[J<_0lm  3ɌE Qdw._1|=>| *p3$, })g ή!߁@BB8{ٱW5#};g$OnЕwr\OD<ӬvH{u<ޘZ*m F#H"2^@mXɾ&Tst1ghͮw<8lqz׳yD,EA6` !DIPF K3-MzӉpGs8=xV/<3I2z&'O)lI)ʔX۝:UVߙem}k)8T0tPzy`!dGx '^ x5Sf>⩯W9lΈ3d[ „F*mR399ލU ssMkU*WI_?O{R'VA@pdA19 diKnlqPo4iuy~۫b3Fi%tc/t?=oɓ G(7ʯV5?b:\bb/Ier ~td1.>|yP?~??|CߖWw"_;8_ێ{v5zvG<]7]9.fÛOUojCt bp_{2j>*9p{Z5tԙsiꦏ~V؜ ]O_N)ŁH7d8α׿0/iv̞|S{Z%\޸sMl) <0m ްiX+x^LZjC+qӀA}yz{Yß&ӈ* a~DŽ~YW=NeAI`'a2;bNƉOƕ:sV$ӵko~-7yU2̣}kwAW W=|z響 z~b/Y #+sI=t~;Oms#醧xO.Ţ$b8:|:kuc7}7ySSvݕCMQ<}apϝ;CY* ҺK/v nwHN"xӏ<*mu}-ѹY%fzjOzjE˞ڇ3mLW/l?n wUx?j(zOmi%Dcwf`ٵpߣvfZT͔VvpnT HW M4"\) ]1)u5@]!U=M>Y7E6u[k2M1|ɜ/MdI+ ϛuI \KWYBRR7ꔟ/:]=~Z/Ƕ:hAʕ(!Orpy҈٢J2L]NG$Fqh4h~^L~R|[Œ7O{ _z W㲴LTm1E/IuVYÿ\sgv(vfǟ^ʌ:藲`QtoD#ԹptluBcMl!"6 kjlWu]cwuU_MگŤJ)8yƜuwwY.&%jU]q3u[m&^Z0V#~b-?t/6װf:RMJe_nPoG9-~ZIef2<ޭn{}WS|*3w_BDץg= rruXNE'_yGN8qjc_9.==﯊]iY}69gI&|9//(b]܂cl 0;q@2~b)\]ۖ@ă2>zuz㟮͹N{ {Z>4 |lEQGj^};S: IԺL%=S7,޷=0sG~~†\YI{g.}޵l-syԭam.t`=c74?]jLq`(8?]M Co_e)CXgp^yRy8l>cC7/<7Qߣ?)S(_?'ߓ&xNpv'\zqtyv2NPN&;U;yxLxqըmI'*)v "ؙ SGK[iV;isv37֎TI=(VA%QV,1 -m')~Tpg񣍴ʞܶ9.~.sZmvʜG+FHkbGb=1w}x.5.sw2wcp]+=1o(a1n ٳ&z[5d%ܸI2s˭z\pU*g]8fjThjAػ*gtS4ѨhF3fBΔ\Ç2:*HWE+5JĢ+bav9+R1銀vŸ*wL >t]1q+tEFƳ hjWL2t]1XzCr-Mh[N]7ܾ:Z l(AWnաY/5y ؂FWk: 3jR IW ll4b\gbтu X4tEh]1FW!Su5D]Nloͣ-QkuďgV1%v?`{ur|ga+85#kuxaV$5VNd"]1mOtF] PWN+1ծ WEi!S=jm]-J2ਫѕ«t儴Ԯht儒RQWCՇGY/V }u3 vR>f:˫+N1h5J)it^8x?7W5(i:oQY{oQM;3wOzu5e]_o %3%.J@k7Ta?ۣm }rW_+Mo{Z\_i֧}agP?yHoOa^{n{:j'IYl=Yf~PTӼZ;J7*~.,G Wo۲9 3pHE }{?=ߚYR2q qyM m:jOxQwD.ʤ(*4̗WH?"&bTgQ T,Jv>Bn|Bys'ͳzM59ՑJj MQ Ar#钍5 u}gtÚݜ|OZ#vR=cGZG7H}m fQUޅ6;$XKQ 6Y ЗI buRƗ ZRv|^ۃjkT 9>$]^Q"R}W:a W@ezm|#rd2*zSJi_t#fH57]Ґ?;1mguP0,JLhW4 ~DUuASPbt'XY?C7a:? |D\SS5qC%:sQ%@ `. 2MC{X +TtgqJAQ"Ł.0͑v%~j'A{T?!JPA/uj8Օ/H!j 138 2+=ķM0 !ѿT(`5R;%[ l,@'@Hu{PH&C@U+cG=D &23.} 5_ݡbA\ JHN+ zPp*_r>`xv&^ͪ_O9s~`X*f=f~pvu&!jhX 3 b{PT8xi7@.A6#JhIWo#f BX9Y4<#y䄰 (`QBą򠷒!HDNkk*fʇ`)C#`8ӥ`)`Q &(!t_15lm;Bdm:^zX1TBNu~d}:MOU;:@vTVB]+!Hb!w5 CӍ߿^^\aEUv!ȕ'X1Xk3tdD4Hcc.u%m6@^AJ<.J3nD4eP0F4Kq[S-Ah[Ry !z; R(VWC+PA5H#VF/C̓pFL"(يG+^ca7ls$Ix2$Cp Amj DQ;Pf{02,Ƣ cA8gPH!(DMRuPkV]?WHڳΚ&ѠTf5áRk4[oMu^I";BZ&B;h! 
IHu |46,A r7ǩ ]e7؀q7Yt9/`|1oޜ//s=W&͂ ]@H77kF֞5`}˿{B!bjѭtcfԌQ?mAYo5aנhdx57לAPK}4 ŢW9ڡwU!t(AdJ ךbdzlZ?-Д nF;k|TZ{m(=mGm(utqHM[EnAc6\WAҘ"&b9R Y 5LBER!6.XIjl$dipM(]*?#t=AyG-F(G}T1nor,r KI'(O44AlYUӟZ:g'sMTРh#'د/x{qŗI`Xz{ rۤ]g/?,?.6Mz}ꆯy|xg_Jg_Zo\??=]_-~h㺭z&!*N赳ɮ P~FN 32.20B-=v'N@' $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@:]';99,&|b df;2$@2,H@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 tN W|@kl@@;2Zq(x8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@q4''3'PLJqG tN BB##N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8|x%o5VǛ6 n6uy{z @nN%+qŸh?vPZ-GqwCψuz.th,e)ҕW:<:vu~nz7/Z"D Qk \,/{k^pjd{Y^`޿4m`|VÒ/>J7'~uJqWdM\G +PLRncb{N(Ym;&vS NRaȪ6h1zbʳ |A~B:َx5JSq%ayL =>'$mϣELY"[[\/W6gܤͤ=l5Q:AJ]3Ćb;bdBW6c+F_Gyͦ#x~K^pQ/KW*C鏬4{ЕzkT 3+h6tp ]1GOW ҕ69QWƒRq.th;]1 tute'gDW 8Íz.thS:vJN&vl:?bQ;]%Y-tut*ogDWffCW ׹ (uutj?JB`kV2r=?ЂUqNW Ut}/ }=/ /m/5NWɪ)UrSGWD4{^lAFk= !9چSz鵫:о]퇒݃S^{|]igCW \ѾԓBiINBW xF p|}P>Zo:AEfDWl ]٠Us+˧|c+F JnNꊂSΆs+FkQ)ҕ ]p)QS+r}^إlɲi Z7b :GJ' G' <=VѳiF)|:x~Gvisq7:qcp4wݡwοzmͮ7pU^Wz-*fly[4j&S}Ts&F?gWk~EWY_-ߩv7׷}wZDf5O/9fӆ%/C# gJ?Ѻp)0fi1ݬ w S`ޗL>go(W]7!7|2>Xm}O}Sӈ>]VRJ-@4m~Ǥ |ӧ~WZMo~ybE _޵1Y ZbTṟXoݸ֐hI5-G}a8<.׆.-n>Vn6\~Gg~ػ6nd=60P好W\pA0$.JNo+ɲ#ɲNFnX]rH|f8K>:kn?b |=.R]/~ŲH2M.l#'+&*ʤriYy $J' &QZ8`H9#O>W9*QQ&qL9YG5Jǘ1eγ,($:x{6y#\~XbpcAK6VA(-JLH$}E$5P,ƧDO@*!XX&8).t2F錦gf{'ܸ '/Zmj3_Ic2eg͙` \O=A ->ҿ^;N+r`Gh\ZgS\kX-M"p&m.@ծhvy5yK{WQsy_qzjۛk;%l \mX3T໚ƿQ:cM&MѨ=aƝ,~e&ɲ.Rmܧ(l4t1^5P\F3o<\¥+ΚRtԬF30߬5g_ h =D-gop:ѐZضE d.T\%Oܞ ޞ`NhtKdUI;_U4OMiΟ =E"#C-*NmRJ*'8Vs!:d]S/":@dHIxM9G%.EC 9+=JQf{@gYrB{&J 8%f"#␙&Ύ=L{3tO%;J09 b m}QtSE7U+7(HDHʨ,_FǴAl52mȜ؋68]KԛNKH {~?cknMt}&K#ܗmlS4wYVGcP)YUTA| pf$ul v$;'{RZ^ױ,ܴ$'2OcdLѲ(cYRA敩hc$)(.CJ!jK:D^s%ܜ <dZ+Ec(JaYHA4;ORgit2gn|GVf[uvn|_1690,Fz2<ҤSr=6sLNI\LP%sr#Jϟ鴸P46|;:@7QLJr4 eSy6keNG9?J$ۿ? _ҝC1u|TK?h3lu%=|(]cI/<]m $`%oL AbLb RBeY=4_"Zu- KL6K%.Y)]+m^OiԑODYS]֩YYRȕRV`$y2mo`r@SR[eT ):2Y^QXKtY"C/ 18y "/:J5} ."QY'ɩF 3&x:e0aCAc_D=#?"rMY59J ^Ƙ&OK^d˙lIqA;+~"w1cZI$%rBt n%ɓd\Ų/gψ؛8;q4ԁpYd.R7-;n:IaPmbҤ̜r3x.A2x(x؛v싇g<@Xbw- w+gxS|Fz*ZcBit~HTi²B(dZ!&RO(ƛȞMV zF%!X>e2 ȅ0+X! q0ꐲ{E]}Jsdw6j3=vbO96RS@%d\pˬ3h ˚3<x!5bnUU3K<((2$)A6 \+LQL!KFܛ8ufMI߁1Xy)?MW_ox1Qͳ6/9Yw[g]'|Fg?\湜\U~.Rj}}c ƺzJI$*Mys{8kx@1y8} -ʛVrG> 129 "א dsh}0IgVq<צc hz<:}ӤaS"hyoCRigSLJbp2bF! z<6GS1U26so^ͼL HQXZ-s$9%u\"aX24m =`p c;XwQ y<Kux!tQ6//E! KシmJ,#YC"q٠ TY _C'`NT ffl!)RKx?hx~} !U _^@_)פJפ.7U#6VXͪ"CLX[*{s7ףEG7u/-׸V׺hZ7k֦HFJŚI,E¯<( Wݒ8FR/m)ԐN+lXwqxuAǟ7/oO߾yͫ7\7/W߾YS41lAǏ`E/UwZUܺCZyR|e0.ր|=~E6݁tv Κ *p]AZz3/]$O/@vweosGBr5eV -uH9"QU-%.lj]4{rX27 (p09 ]`W&w4#$l `pizh n pI;cmhKFBXҷ~n65m˰6OGLAY)1QaӿAeuґ(yUVbKR)՗y<$%È.Y3dWC1Z9dYFۀR9y"('痕\H!JX. xz7޵5m$S-2rJ#AC1 oFLV9czO]q=^so쾺܇&] gjEo Ug=T ALWVmuW ԯ͚ߎB>4-V峢 ISS4A {_2~3}ޔ4 =`Gy_vJzjoo+\]v w="^O}薬 =ͮQ ҕP@TPEeThBtt(1#d)b`߅o$\\>gvm

0V5U-n%KV(ny(VdE҃,P*CQɵU1/>g-3qnhgGῌ EBkKp̚ 4J@`#%;7.MS~`G@BLVk6 &I>6|:Zi1'w}mt%ͦ' l4˂XL Wl "I S !DZWX 6tGVI"sl,IJVG)mom͈lU.4z/יLƷ^4!)ҚA0s@F'MV= M"e_-<)|ͣ;8cML?ut!۱uدqd^k q8~"ҴCՐ7Z/.Hk6 cqKOFy.,Kro"@I`9wS20^MPHh"6Ҧ$Pew<)ˊ)&УgMzaT#uDlӥHE}4w|~m.=b11wӸS1嘿σZC̺9_-WmzzVw<NUM}ɳFc Kg$^7DlbFDqݗຍVѷ/7+i'G8ȶBVC[uU *"Jz;atrS];/ o欙ks:Yw9﷏y(3t-Zm=y vRu[Ͳ ]ؾ~Ӈ )GD[TQ1Ԡuo[\pc(keMJDڈP#E) H"Nz0 mt 邖12~s~> ߯|6) tN)lT%3:z ފ&EUpWσzM*+2Kk3jMFt7kl/nhj)]j9ZtQg]04oi*h-&;-U_g%p_ppoN%јh.)F$.rv_@EhLX+bb.RtK$e A{/eɣ,dEGFRDD“xKhtcdzsQNW=j3lsEu,l=ȶqk"${A-}8.T{{mpC#=XB,,,RU T)b bAI1GЧ]}14tEN UZP@q sAԥ-MPty*\iTB#}ع psh./|  !g(4R:X^|Sj%g9]0W=h@ 95XeEH4Op8rf;wB~ SN __3u''ܶ&'ӍMfGE;`O"x2-X(^6?-p}7ׂX;i9շ?Y}g_~3my6YLdt>:˫xdvpL?;#'gz͵gƽw- ?.><_ V,>zN "/ /W!tTև~{Z5dԙ3iO~v؜ ]O釟OO)ŁH7d8Nϱ׿0=8Jq֛Sܥ:0-- ٴGεgVi^~Y~- ~4ՠ>4=\ՇI7/(T2HCyo4? _sF4xpIa7Vk^jC5H)oNN49ύ>P ]EAX_ƋpɬՍ世ػZ0yr{itύޝ[F﫚?óg]-}IO1h͡V9l,kޜaBEV#K8-62/:uʫBoVm;V_TBI^ospug׵YLoؓqm{m΃@AD6xVˬ7Ѡb c cpI}םx0\˶1G>eIf1X~+AV6Բ+5 -/&i>tjyհZFޕqlٿBaj_8 b$_sP]gTH 梦DlRw'K[έjxW#eci*WVI[6ֲ6rڥyQyZ^3$E9WVW}ݼ7K[}0&yW%"OΗfbU8;v_xzyƶs9,38Ǵzͥ0~z9dk>v@YFp%EdDwSn)*E4!Y;p3l!7@/|jpu(iq$%!dCoL@t.$I2DwC ]>{5t ml5SKδ}Zp;Ψ ;UQ://2)2Z[ʲY2BV/D(f6=ANW+2]5kZ7ZKU;t %ݢ+рDOW6=  ]\Y.th:]!J{@J$B{&th:]!Jz@D2n3+le6tp9F]!Zyu(k3l=]]]II]`q$UWWҐ.!O[-F*aT>I(7_|= ;>ee<J0x_pwU-J`rW_mW1 ߿b&/0T萊R:~<)J w(gvl>} O?k1!PTÎV)ɻu- %Yv{ &D%>W ~w*l*c`=j@PWblfO7%u ʞ=4 (%=]] ]s눳E!|" 5v%'/,:'um>N0zu|$ToM/^ -U#Oy5C);-Еئ_B)͆.Ut(qfψe:BBBWVͷtu9tŹT2'gֿ 6kh.th ]+DtOWHWsr8Wg n ]+Dٵ^ Q2+Zdhf3h9 +E4Ki aMߧ2jלBJ%‹b1WKiz>=`?2juE6;- 0"y!m.WTGӆA߹6 C6hI}YEVNBa79-NȖGtM+TOlQBKVzJ)(}+?oWO6-Ko!s^YUXRr<B0PJkӯKbUVsw,؛<ՠBg=T66OMz J]'ap,#l l&&]+Di{D2JJ3+,B@*uh:]!>h"JENASشAK3BWVvR6"h~B3mj״ hT+ՀTOW6=Uhǩt φ.t()8ˈ/Ith:]!J-{@R3+t> Z ]Zh QrNAbUד Ϋ+D)uOWHWRg_Mtɇ.F]!Zyu( slRfT504~R BM/! !v(암V*J4!|GV ge.R7h#&\eCWW\ yBEҕ1ZQ]!`k+Y6th:]!~V"ZJʈ02B:"@I % 6"Vvafu;4IsW򖶿k`wt=]Zbr+L%Ɇnk 4Cku P2z@ n3+{5森 UWW.B Kγ++i.th:]!J+{@ pqW ]ZVU3tutѵ\؎oҨ^NoТ IPӇgIlvid\duGz'˭wOh5&rJ Jfz=ʸVO VeU\(7zk*xlX|hlm[;4B)I?t4myEGW3 ]!\r+D!N+@Yr2J+S'UM!G! Jc{f0.Usv+r ҖWk4geL2=]))tFt!}ѽO+DV_3tut8dDWfpm6th ]+DzuutŹxS ^3BWf(k/{,'l 2 ]!Z+D{gJ *mNSn]+kD.thuAD)tOWHW0 ŹY'XNN:AK֢<#?L?Vyu@2*-I"*&Jq3aSLiO ^vD$`pIFXߪX#LڷRo_WU|jq0h1թOo{:O.4 s0mld}OV'{gxj -(-0"HiNGƷ]Gޛ!8(Lw2UP2$b 47#Yw5`׫ 6ywf cIz[3޾o@phVGF݈\|_?q?yly4ˋV]f7P1:_> ?M@7o . w O^~7ƫ^xgKM1rWwxq>&p(G`ay սi4͡;Ǣ:<w67&n*y<ű >`סB-AK\ F6m~L?M͏%M\͈6v1d|٭A Wx.o7'7ZR??~]UͶ}Uv[@OP=Z P t/Y Í6t1JON*3HJ+צ҃2I[ⲗT;$՛ٴҍtث~9^l ?jzma9N@lTLb 5D)L`VJv24 ZI]F.c4: ." 
>l+e2ΥܺHI[UI$%aScZi+Z@\WCT5@QGjsA5׊)Km,p nT*p²罄:#wUgX<Բ++J7Z^>qZ^ xpM)4z4^b$1fk=ܶ雡՝(mwߟ=J ˆn4_̾,cgp 6&p2e!F ÈZ;ڻ-[̃ 9s7DP%ǩr6Ht4A% HŚ>[lc6P0tJθV%Z2.eNPzbNYH 3gux߀3pUk٪0a*_ڳ9^J b?yހY`%J n`Z$bԔ!ă\0*HM DsƁFUYp Zp Օ-֐kCy *:lv)K.:E ԮxHLzʈ[JMwA U(vI27[Lg]bv+m]\C-=4UBIq;vXha|7 Nk85\.QO <\ީyFn 57L~QX?,\ʨDK$0F)'TdG#*3b@9zgsZs9y vMnߏa{!u_ۃ{rq\ :.Oubg6 `7LR<KIc,tҕgJH\ABD ( ԇLBGLIǠTT_9tTrB)(Od?Ge=wŋ;)s?Φ`X2)ʤ[QQy%xQTL R+ 2AoegդDKZS#6p ^#gJIa+ã  άI@ræ+^9ʯt"xP|)l}׃v?b3VxE^O]i :"uisMRZ#(II;se2>E]UBJ^u8qv p-FgZENP҆ ' =":QU3SA?=ʠ0,N2)T:jbƸTK%h@,1Z+Q7ZK'Lp7VdB+SZ]FIсX4\T&Yt*(;y;ҾjC\H"Z`=Qd6Hβ$e#Y,Z3hO@WuZIkUKq`Z?{Ƒl qlr]:vbBK""eG op(Q_%LsfU}uϋHC܁[jFG{?w6,A 8JK]җr:MdIz&Sˠ 1{~:RxM</O27I8hϤ(A&8㒉1;8FJ.N&&S2'iUsn3ԍVV9ƒxMuiKq9=~8l*J|x:GSle y)X)ѓn[szuY8y8=Cy)tESί֜3NTiǴK(Ue?S37^kkyvqs"8)S aN0Gdzg+.Hh :YSoqxpiblLZ-^>ɪii,.1 G'XbGAO.knN+rV:UWY'%ğ:R&ִ|g_W"/fn1(=55&ovpvcZwoûׇ_pȅ;|^Ӯz *f&%`a^SOOT[S|iobj-ڼo3nW{C}em?\쭵IoI(oߎNƅ~Q^\T^0#$6oh.QKuK '6!0݀]zRp$좏G&n'ݾMOҌ2Y)Y$pb-DK֝~cnȒ}'=tanZ{~Ǡ$ 6}{WN;A ]L2UB8Y%' @pϻ:יwemc +u/(Lx͙ߑ>ow>ov%g Rw'ukκLqi^B_sfVW6z QdZ9)W\C jPړgqz&ڹP!KBDMc`B>Iz*&霹VYFq%' f>Kkogbbui=JKJbHs6ůߗc[)b#A=jqPoyOjT8B^ \mQ0pNV Y1l-jB+srN[в>~rŶ[M|M>}sδv];|KnZ%$`rX, Uu?%]uGs+Hќ?E j=GVjݤ&t1J=k:6SLY`X$rG̩UЪO!nv} .Qf$bg٘vuxp* BYyɔsp`t  Y;A1y/6XkszShys qdg#[+F")U [٬Kq3hW]{յXGӹܧ5~EY>g鬱O |2=~EZ_ɮWϒcgjۉbKLd+Xy+QZV#Xg>wٱ29FEZm AEOR=BNdin6gR0YD|inuγN^ދ)ZЫ,$6G[LXUT'-W*'T2!a!i!'$Y .Y +Ɂv٥d1g3ٲ&ΖzE~ak*ޗѺl<ɨ! Z(3MYѻ u"d K o v?H AJ.Er, Q'rfC$uP5}Z77,N#TavݲIP.tF5(WèepzQF+a GKCtlO6 Avўz\*jiY;=5_r!RCfsDI'$tqg,dCJ;_sB>Qt%-Tb*+oVN'+OzsеjϭF֞<-:Liӟ#*Ȍ6-2yB=t$ƑgU~  Q36&]klv7*GiBOե4f{jV+ꔚ+vPG*D߸9v0Cǣ!=bZi޸&JxHh["JI+ZU9A B݁Ax}-p#Odqgh#Y܎dqsF.Hew 8v+ @*DŽRzgʗr-@d+njTBYy)2 K?%бR]qMr9r+LL&YPXbV7o"\rgGeVv1*2< N~B[0fFsR RdTC\9D6t_]g~^JAS2[pF5ȸ0Η47sC&Hl|"EcR H.@Ӣt,K,-WSVAo$FM]8d,ѸYBxQ܊fvЈiq+$U3Ä1`\pZK37k Ia*%dk|6Ľv䭐u3:4=yn'{r*ir~sZc1iu i,YV,Oe]_|[7A #['̑:#g11iSt5! 
V\-Ӈ8y<ַNW(UhBgm6 u+PԶgBȖRF(p3h9d>sWYt*f<$ K\YBu+@6əXJQ̏а=]-ْŮ-:?>?9x?&`u{y˯REt2,o"c~3٫:2#|I-џWqp\_ECmS})mٳl"ZzBZW`xAFIH*FO19+V3ku mg2tT,3b ګF;˖eܧE\j)/URN}3UQ?RtR֫/W/-Up1pndr0f;lF$ nyoԦ .ڭ~zsnYm/P5 <&TL FR T1Ze+~Pߏ]g^a:?VÒ<W[_ƣ7V`LTcqSvG?JOl}UYz{Xj?wɌK}폌o\-e݌V%a(ZZ GMYi(+&7ڛZ1f2i\2WUGx_,:돏ױXf#WOGJ,45 [)-e=!}B[k_0'by]Xl⸿*ǺCm,[wXi'vgE&}hx?]y<̦o} +3!?- ֗>ˍ/Wg&mDsuӧ@=b)NO\Lԟjء$ /b$L/ii|IA,=:pa,4FP#5ޠnTl[>M}BA^(u(y TĬm-'%)6 >RQ6;kA&~(.f,VHH2XLHB 5 ͬ-~t !aIKu6mWgvʏ:b,FP!e2YK@`XMR Axl* $SOJJŅl)SN}a;**qagq( \e7MՌwԠ[^|zq_6hd2u2/pVօxJ5N;gYi&Wm.2^ P" Fv*{>IBJVT6N$]¶H2%""َ383v^-TX38mǬm4묱)!B+lL4,T-QyOSFآY!/1X2kPE&հ/G(MɂID63y1vq>lu Jˆǡ:fD70ȆYt6#W N!%XK(NDZ% 6ƀV w)#: B %VAe<RVg 6Ff::0/בKLJE-E-^xcS # 1"-kQ$&UuNؐ="BΗ_O;CPu̇0>| 00LJ#E?Vg M1_EQͩɆwh| ] Iۡ ]х=Y1KIqҐe I'X3֔\6 !b7ɣc[߰Pt*G6hK`M9QFFE:ilc3&v{ k!:Gf7AE/TY~l` 1ަd#}ȩ7gX<6Q(̏UNr!D,q)֡PbX,/XN޽- X_Xj tF%H>Y>Lt&Q*ԿNԿZai3ũ@;Sqk3'>sq9dL2`DzSRV۩CҞ+uOzF {| 0:2]qj?HtJ+iѕۃ@WNDs#2/th$ΐ:GtUK *\}+FKB:]U:CFYvU+p{tUQ* ]1`c7tUZ ]1Z׮*J:C2+b$铟){cUŒh_:\Ei`ɝ%GHUDoZSEnUE?9ҕ%Q}ҕ1Ѯ*\ۛ)F:]URtut;bN'h1Xњr0BGFXѣ@՞p:*]Ht'ʭS+Oeہz]1`47tUZbN*J-:C:{DW:!moEhM,Oh &[̧òc0!&5|cs?yh̝\=`K6G^7cM3M1?MNuA îiK4e/-6z$ڒX*%|ɇdLJ B"}] (wA?؇.Ĕ4" p<ҁKD,,y z@P^ȝh զKϙ%TZasYR!E Zg܍䨀* | )0 DCksJ+qzyys*.Y*1 @N`fȀ*& g!s&YO d3OcK3q<6*DYJ iQG+1x/ %2g@xDY;-eR]N; X R n6zpPZ*&X:B(ͫ OX* `1 X )$Z4XLզlR\=D@`1zԼXt):Q} ]&|J'{XMI1!K,Xe]!(dGhOM6deGގ`f#_f| E Uk\4`1[ݫ N6h N@kah:vN#Xe~0P&ܪCZ]HעY cMm.tt%).4hhhC_WoTh\s ^-FU5l,Xa=Ҹ1 k9 cBE(kGU((|*¤T)⸑1vx X-V-tC*D4`Ԭ4 3*0 eid, |Der HM$P **ӉPu5%M%$`AuJ+j+1v\ 7CA a ȸALAAX[@( `5ePDdBEA[(2#χPʨ[sV z,,xč:L;"q b004KPI!0ά ::- > P r ֑tA@GJ LEsf+%RI9,XT5oFmY#<;"JPAw/uU qj2 RVW.#}eFTGU]%ޗTh힑4F^n!GAgHH&9%R,g%EࠄjL YV+"1۽@VQ=r+ZQCk>84鼐AaPnhrnЊq)4cMDr|T1&PT1yQHa8IB1M`ߏ!vCc⍡,nכv3д搮3xyW TA׮z|ou/ۺ L}6=DF: xs:py>n:@CVLAG=$]I4T*#d`&SQdy֣2E ) >@E&rZ5ȼ`|ڄLktUUBN5~d}%TU;+QgAKCj)M.ڹ?<|m^,La{v}Z:L@U ԭGwH7ۭIFOB=V`&Ǧ Nd4^UۆfMEѽT$I"e56v(EBoo9̈=Xe9n(!/[tCۚb樇 tyB1B[us"Q%\IbLvP%(H :Ш3) @)w!>֢7Q1l]` v7߁yE!8ij)ڥA$\sg 9,RDyE aLw(ƈMnƢb$ӽ糞w:H1-и'GJFdUA?FтΨ19MAk.YJ@ZXf=ؤ=Cɗ U{&dd, 6#TK.dP?uN^oY%sP׮BT4|ƛւ LW YU=ڢσ>:jNàe+J 'zʤ辐?Д n Fgk|T(G=ZMW:PCX2\'*]k.JC` ܨ:5Uti J.ȎYxЬ*/"SJ,=vR@Ւ0Z Y\ yۨO]gF358F'TQ~ԋBZܼZmvKwkV0` 8.97oN~Jjt kȨF+L9ݯϴjO67dM+7_-_67m{uoT?ޮ۝.'k;!}oWpy/(|rvgSCg%-pזZv)b {:q==ٷGi^_ʿ++:P4n7Xӑ6~i7W郱V[ۇ^GS?열5numBG_0(s%F4S2\PtLpE.p, WN[6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ'6\8)Õ O Ә+0 mor\ WpEwpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ WlbՓMpQ~:+5v2+}j{+7]iyՂ]my~+K^.K4Ԏ8f T|:K)*]"A\Opm3n/u1VAGWvw!wo >\TB^Z# K,{f\/ѝ @?#ò7;_ﮮjo{ұE?2}b@őP'TEY=={n@:9Voey UѢ[3jHK6(1:˶7C&MLq@4aT-<"Hg1a?oQosu?̎B#{;\|Iv-~i:l&9_|WfՊ^DʾTh1nEg/:{ً^tEg/:{ً^tEg/:{ً^tEg/:{ً^tEg/:{ً^tEg/:{ً^tOEwJ=? 8c.܋VKJ؋ 7tF pņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ WlbF5\ t0xś%襫޺کa[Z] B *F=J adaH?_? (Þ^ 1!"ANz1"VJ'#]Z 'DW ]\5uEhtUF0!"NOwS+B/J)՟zRXgtu//LWtu^<5stu/Rˢ+uRLWzIV ;!".NS+BͥPKb,t'DWtz=(ĥҕvN7!E9"NLNW@i`zte\5?I?"*LNWgHW+T 05+B{PFHWNHcnӲm ^ D DC[VK*!/s\Pk9\=oe1xq,ZB j~wu}W{] PC:~替~BS2`CNJsGh++ˏ|lӺf'ٞ[zu>uc5(3vn\_|WfՊZ눙sFZB{w/[Äd{&#rS \2<)e);1*~9ꋯrSC&ZPDp ]m?L(/(ztNNbgwM2P^t`"x%_]rO_x~[^Cyi=J3]=ew}c +T>tҕZ9!`'S+BkܥtҕmJI5"z2 uOW+S`-d*;"OM+Bix9ҕ1^Q8Еz*tEh]J!]9!ɍqܹUz\7Wd(?nXJ,L;pass/u]'! 
x6Uwfc 4쫃IAkb}P"kQ2ܳO۳ɱz+KCWM F:DoPBPɶL"->Ǣ׫Ek[î`1{NqCLIM>"mf&Ψ[xDJ/OY@ -VΥmaqJ-xcO/9xaVwCu y-îӦtwns$9^}ӄVԒ#A:9i/L2v#tّf)⫻"?.wsiqzx^7 O]P~ X`}X}đ[6@m r'B%b;(zwnsCn{v:5ys5^ټMVZӦ*%wmIn(Y|JFnKONp: 3l5e&^o;SSksH3梿vh5\қ W*bf~ _$.6"h?V4ޝW.GSC-7Y~- L}9*7wT&eG7]xdAlX 3&po!KzȆ S =͝.DKc,5]la,su"yLfF *KH^ j]+95KeqxtJjg2!a!"#wLEN1uМ˖5qڮxfV+.EOaKf.lGX\33 +""ӂf6Vqaؙ@&pZqgϣ*D#Wd R2|ƒ@F 5 JK͐d:Ԋ)oh:eKwƅ9|ϼw\o3w]BTIA$I& ;}FQx&0*#ęh 9y9VgG&1UvO(f3)rZɅο/d<lFP#i0Fk/13͚RYpRMgmeDH _7_k*P-nBgntZ{9j2N "p€K{bIE`:h<`S4aZ%&Aeb08推IU7&K$=$S{ώ~]7\]̓>~ۿ,E!# H~ a0F1^O&.z MpycjD V_J߷LˢgPGxZ[=c?Vs ht=ih6v~JN}_ыO"YC8 4zYm"k o7\%ԯ^~>d#vM.z43ZvfʿRu4I89HUld6sohl'Cd3lܭ]6.{zmi傝w8!yt٤S*|%Rh;Rc R9JH63*g}dY@$sڊ/!mxk qi7%{{Wqt=H+7ݨ+{8[rڻW?73|7n^EO`Vvo=+Gh5mXщE-4Xm֮)Ci8iʞok} ҽ>1jDZ%>\ vgv3| );ԣ#&h۴=Tj0ԗ-d5M%AT,`" w1n%9!7z#{I5 )UNL. JDY!1#VSwRr{ QzIJf}Vd'(X{\"K5,ZNݵ&]/tB_S][sՔ4s< Gݕ3ՔQk4ϧ'8D/龨 Z8&ZM{bI>}49:xI@5_wo }n{+RYS9HOM*,W;teė=hqZZ[ZZ8R0 rZcLIjY ]H:6^cB,qoX#VP"d.*a ,$iR yJ 8*E[ 32BlM OF>@TJ3xҿi?z5;'UL\.m9視^ كT@1wA2g 7:&ѻvc^lY2ajdi=/Er=>!(Z!t4Ȩrdm3ZgoY',o-,ϽX:>KMn)rG\EYZW1YWfh0ItxxK[z([z$+$+N&A8̇m]oa jYEc$d,ɤ4GQFyHqexR1 x`d'PEf-_<$sY1qsLH30ir $g?iIҎG~`Oi~CO LDSn }QRCnO&$ ݠ%yZn TޚE:J&оeF"{gI͉oK&!)Y-EXϢT)KlBUvYSs{ɀ2RlW4T  .ƖhB.Cb^9 HR g3@BzZ1e7$g" J .))rZM2ZD򪳱 N:E-چJ:pG2K>S:yQ=PiT,TXivDZШT<8W)tUQZ9VrLCUOEUOMUOF"deD" C% {ưۀv7@sGBrydxП(Y}2%ٙubr.hM u)nv,5'&6WW85['{{_g;t<2{A5,LSKQWk)j}5,sS]L:Ƙx'r7ha /a6mqG^tSNnit-9v3Eyf^ƣ -@)ZIٕcU AVA gouTxYDg&h2C !i>[Y\rgIӥLUYY3VNxNqB[0dF&p0E)2((Zgz|0 ^CZ|x.'j7n5X:ՃꩳSt QLRg /^zIqdRJDztqgU8o+۠WIMhKݎ&{?3xO\k(v-ـoJ\v|C^d_?cn7뭴U)W)X(l񚃕g궕yDP^-w9'~ϠЖB Q OĈEQ6dB3QZ>KF0yx˧Wus _c(/ߨl`F >RSNkmA ٫W.J"mRp B57J9di,yVXe2BuٙVc&jZ /"- !1e+~f4ge S ku$k"drt'e͝qLwNW40gu1okitc- L/vj\o.uxq˫[ą'^p;RYN}?ԭ~UZrSTu,흜o;R1դS%2W+KZ늩&W Oe"25 YSSn I(&79 s]\zӮԬIs0.8ffDV%"NG'X@0,bۍtpٻ6$Uw^Im bBuwĘ">lݯz(-GCj|YHKPn~H6ϜiԁO hq4Ϛӏu134kT Q*VD +ݒw}mܷ 't^wy!}qiyVJZ{."0ی9IXDez}ޣ um9:*bĠa6ɨQjoڕ:ʁQ(S\$d\UPQ1>,:;Ik^~K"mESʂZm%~}'O:Fua=ObU|sLSY!;hC65Oɡc,B;B"pjFlN)@D-.e5Mه\ ,&ÍεLJfF͚V q}u M ;_dRw,Gݓߝh40_ƖDA;)3^yoN`AsK*$΅ZP#*G}H'i($dB6%&Q l&ێ9HqYƮF;Li];]ez#8J 9n{,DZCQ &s0QMWWdR+asD Ln9יAt&i}mz!ڴo y w؛qkpGXТsCRigSLJbp2bF! z< 2GK1 EGY l[SR`M's$:%u\"bޑ*$$Hkv{El uCy}zwr`GaČZ+KWœmEQ6/5QؿoIJoX2O)soڔXF 'Db @A)!:7 gԜ"P'g03f !K^/V, XO^ BGF85kOC@ -m;JჰRb /#&Z f"i奫:@|WN*0;ft?ϮVGZO& :,\y!sv=;@4?lp "YէZMͯ^7?fVgѐo{4grhScAkq Cs};L˳e7>n_#,/^m8e]s<ws+n8H?.":zrG8~>V}y$tks-a6 bQٲd<:[M|p~iw*rרkzY*ĕ5i,^HAٸz`%^ctS츩![w;q|֠ o?.>~3Ng]SUHDI[WMtu=N&1?DA\ޢTyk/V8J {} b.;Iak NҌ{%Z) Y pbO!ݵW1 pz;s;V*i=f:G2I>Hq,eKN:UuI5x=Me4<u_4|HC*~KIC8c[íWG8o>&/j`5R(r# S(eZ["n]PqwHwx.2|Ώq-෭n*A=xۿOR.瑩g xɿh"Ad#U̎PPNvM7݆2QW_]3e\h,FIY1ۦe@+B In ]{ywx)os+\v[9<%Ćq^сՙ{PV11M+瓬ۚ6Zv- mv/=z$%ӘUy!6*h]+si]t$HEVj=:W˽+_-|܏Ն\;f.Jт)'˒5,t$'rcꑓZs!(=`be}Vd" #X{),X&쑫F9 G{6^,%}lIEg_kÔzҼ >zHSq-yujܦvuh3B3d^g2=f™̸(tһh-g~8S8sqL*&$JYS>!|^9:*:lf:k}w7u$tЛY |H N!2g@Z0LFuwjs3\/]c#n<ϥmׇ`sڵU Um=T Eo.sԺ1=^}$ꡚ 0ۖNOlEB\lJbBlM KKLO܌*pw/R+4@lNgC0۹JQE%Ya܅e^*AWx#@ ' \wwOvپVv1XԃMч`kqIv R(=<P cd^#K'`&d4)H} >ޮI8;9=T`u7aɄ.qD[IX&.7#ccO_A>͎u4t!F2'iIF!Ԫ7jƨQ~[۽kYe _| |8 ,gI`@)pu`@1f <295̒GOqnXjo<ߴBv4x?Xm.NLìrgN } Hɴ;ˬ3h ˚3< x!T9p[Ut,fx*)5 }V1$H;涍$>%<*س}W^~9\$J$Q)ǩ R42GiavSٕϠzXӌkark΋}70n RE=S[ ^ ]09b\'WiDfʈEW=Uޣ+JHE1 6^tW8uu"3RtŴ+ %z>+:$`J5~F4ҕMЕ-zlDF;1bRtElbJ EW=ZQRvV+q@-(]WL,ꏮu+A"`uE]1m(#bUueB t!R"EWLo 25EW=ԕuZu\ж# ys0Ԡm=ZMco 6XUTu8MD3[u-Ӭl' vUfRZjdGnqT&leG*QɭvW#g|Jk=0*ǓN5x@焿65`O/.j8^y2`AL{*o`Zsҕ^> AR qfvf+mMޢ*/)"ht"3]ʾ)t"?] 
CXt]COÍ^k 4Jl8+_tSr LV8JWL{5y(*ꡮI;#'b\ic"J誇hBPtEу]1ƠEuŔdW}ԕADeW +A]WLi誇 pI; ~B]1RtŴ+X>)=J٠%\:sߠo8<-݅=Mj/vvyqk;b6hIe%1$sA؁r'֜Xu-W)0kg$[>Zr ,qҒ#Z'gҒeKkLKr^VbəXxQW+ʋ㢘imCҙҗAS}U4ƀ+r^E1ֆuŔ†xk?h* WK5n :s]QM]dž,X/IW+5Q֫uŔYt]5AK= 7*+EWL1w]11EW=ԕA)'HW | iip%(WtC]|A"QWi 2 Q:]tC]Y{;m+(FW+1ȴ}vŔ֊+tVfL^i- U= Z vcu=^Pn缑+C@KԮt"1o#Ab gDF1Ȟ@bJ,X%HW{`Tb\-FWLkEϔ誏>c7G-'b\io RvJ↡*gWI=y7 x4p(/>ULU,zl2k銁+`iM]WL[cj/I #1b\i]S6yj/d{G1bhwbJ㋮z+U ]prtŸQIF)z+ޠŹH) h67ޖ t@Iժl#q$-y-S`PƵ %%dڠsO 2*SR^bmEm>;%T_|w'1tvF6/Wz4(ĸI2e? )C2eE("`0rF!0Rt+A]WLiʠ>*銀Z1b\#fz2bU8yKe p`29r ɔ&uTآDŽ% ]ppJ7]mSbʬV)ڗb(HW Q7X)BT;b{]1%bUuQ+ t^#EWLc"JUtC]Kv'j1bZ2>bǿ ]1+ƵZedJz+K> Z.;%Н06hI_N[ }b<]pN+V$g`g żg]/k}Rƒ70oZC+= }*]1SRtŴ^+ 誇VBI2 (b FWL!JPU w^]1΋i]㵙қg+0p x$JJzEF}^]AcCXA"`m]1RtŴsQ劮z+4+ւ+ƕ]13)]PWڂQء+Q֩uŔ]]EW:%]0h+5b+u7R{]tC] ]v{1"\TF!w]1.꣮x &Srr'+=HQ͝AejRY6Q֗(.Clgi6hA;h'!ewҸv_B-%G^LKq3m% Ab`'GW]aUtLA  r[-~Nhhþ<^yvfF5HJ/?Wt${z]::;MjWfGlQkw)mQ.:Yu39ojp.2x5ȌHn)Epa^ &Ӌt+}Z_G T]Oq}=MMwf29WiMzrZ~f+t ޞ翵Ͳy+kkYqtD {澬|?r. GT&ǗGK޹Ic6j^׷?xF׷vyEω'Q\\<m>7p=]x7Ԩ>}yDgIO3T˳ه?3]^n./xv{v˿ 1ހ:1~L8JsN9}SP=bkz^R-YTTϛOM}ƿɟtί|==`m_[Ȓհhϗg4C/[4q2ݤ52j8v1h2Tso:hgN k&rxQ]g.H5j?Lgj8YrЙ㴺Ӄjt8E,=D|-6s,_Gnkٯn^%oyjIw _P@.jU^.Lˁiyh˝ceuBL7_XixRW9r:붨U<Ԫ6Wި'_0"n]ZPl5RAy\WקFG {1\Kק 2^%%} EO*Pd"΋F*f3vaڗS:1:~nr5;L\gvE$O`Sǩ<2Bƪ$B۝̸s2ũ: q]_KM;[I!Ζh<:ឮ60Ɓ!4آ2ѩ~/nn|_ bOϦ~[TAXh=/ 7:=s*DYnjZ R{y6>}/RC0Ac͋ . FWreZ]{`ئ2vLa|~F#VkSkk#ާ gk핼{Π,=&Ýׇacb0~, gՏ, -ԡ1d&GL}Fxij+y9=Juȁ_^꺙MϹ~~ɯ9%f45s3O8s{>7}N>+y92M2[hr!:qYݝ?xߞ,. EXL_lX?_)ng-+9p>DxBER`30 ͸q:֞*5AqnF3C.R3^Z'aPS7c#KWo4 ~걎ӣQގť;mlqפsu̳I15(jPzJ";O?_{I"b<ڦnyiݚ~:Sv|xTıMutmƼ`"V۳S~~C*oa1ĩN/E\~qvIgh۟q)ް̠B@)K+CK&&?{۸J#O3 k\,a y38(qڷt:8o1en\Kq3 gƈdv|*&Zq`(aev%Q\aZScPǰ1nkE1tTNʼnc,Bw%a&-zlX|}`!/NF=ڹ8ht-?τ8ޒ`{ձ$Dj0nsVR8> ͡$ϼK@ NJkYhS3-4P([oP?-j]Ci] ȾhwCf^ofT8HLV䳏a'ܵ,Pj&`5yMmϱ2,x_  kyǽǚkR4’ 5K\T, {CkYC^rZ+;PQ*1&,XOʺvs"% m9c5E gKNg^ioc9-)2agQKɔ $൴ 0t gE'.] 5D}fy4RR{G?p:{vάXjօpB0dccB $7RI[sNѩ,Mu)PЙ#F)ᵆN1ӝ!9Nz|hVOeɗKXpnM+\wEOB8S7=]J0oeFc)r$S['PˍH&0>PU׿o R.Z _M>҂`͌cz@ဿQwu9 c.)*Av;XS 9HLބ l \:xu4{fyec<-̅姎e%jL;Hc6-*Key=\uz`0S2WK}]:ӢȜC+cqC㴎3mR^>3E"=i[AczI$VL%Xp)ɲH͹N($@F2E$3ee<~} c%}6˵6V:_a -H[^Jy '5vo`ܑI07Vq;Gj!XuP m~OUyd#p*JVr^}yL%]x%)/Ҵ|}#A~{/Z\UPEdKy'%ֿ~2iϊn9?!102Af62$&4f $@д7ڊ73q+zj+~Vh6/^"xSdh֐jZdRvea|Yy)ArTyL(XO8_ޟ Y,XfG >.G Mڍkl g\d?B%֞bF3:oB JgXgO:3ƢJSH6I!?Z2 A>wkXLnam֞V' n (ul8 ^~cYsPlWI%7cFdlřj~b~0zEP4KHӽcyaڢ\j. X UBdlni4Jx/ g卞ڋp6&"yMtODsשwɉi(:t̯?!81W.p 'Ojol۪ۊ]je>*6 18E  83&p&Bpz+;\t6 9#\*&;k(r $2Z+q][??c&UŻN$W"4g%O:'[O6eV{ZvVt<*-f~ cuo :wT`9vM0=޲3Gx?WjI}#*2rp ~CIR4[R 2@M0qO\m|  5lp8k[#7NژŧV.xssla% mؽ_>9HP;R:9u0LszJnG#ˑͅq`k3>,Go*Ҝ*_> ㋗:A  ܜsv0F3~#(P/-`:Db9ʷR_j@->?Lt EkwBlBKNBz"֦A<7i7QRw/Mz,xQƠ΀+ت`bMmώƛ-&}*]W2q 1RI?6 z;3~#D`HT!Vq0b;ܯchBwhhsb|3IK=iksۿjOf>gowY͔CeW^R#Gg?~ymα%OX|DZh,PJϽQwl)hd=eQBH(ASQ%"IE!o0?Δ\*^9WΩ~` dlKL}čb(Y83%H%$B Q1fF_;TT򅣪jU9 b@PdLZW=紵X5)Mܴu4.SZ5@r Lj̠؈QƑ&lj@}_;bTC5qzN&>h!R È++Q):#nLU:lcũtN!1 F8%:gTQK.%$n%De|ce^Ɣ+9I2^|eE~CGԦ%GPz~6nIQ9˾v|v&fVB(UB*QγJ aZ K H3aMyFYg?Vt 's$F+՝;7~]掴OLݧĤd%9uY[k.d_?='# Z͑ߊd^t[QvpiO:ܸ H}U2^H*>XoLE=GI&hLGg:]G~|z~^r"-Fc}+mu `#ҷolz_LE.;ֺYyewp~~6ZE]T=8?uoܨO8Ha]ǤFh7~Djޙց2-Ypv)7JE@E'LٽЩh帰u+V 84GķO.j= pt[TCb9#pzyGrT_]vǜ.kTBjm 6& Bf.tDCK<.~9 "L v,3ixs; P̿&(Y!D&Dd'oi9H*׈sE"~{eLPa 7XZPp7 O&m"߽1> UzdaDa<R h0D4?ߦ^!zCE4ģ)^?rŠ Q[W+XG?FqJ1ac%nsvտI{#slw%[_J3 t d>d-D$do@ mͬ&r]S[ΚdbUו vu6Z3IEO5RRr쵷0*<ߟ!!V2i>Hzi@e|Ph2-Yy v2GrZЦHxỎH$`1zMc/.1а״_P|9\pWI3]g?@"$m6'l.IiXDkоf˛4X&:YOfHM ǢV] ;C1RZ#Yz cp钩<7 /MMe(GHNݿ 87Zq9%>,PgZ Ҋq[_1c\-NJhG(fp웳6"%؎c|}Sq+dS`G&pS I-{\ 0ORw·a"yG| Tޠϴ;S7Ri& c5+Tr ~EȠ\]Q@Vv4Aq#;L5͆)M`{"sq! 
Hk)r#n<\4uv"ul &JE7ң BHaze QͫPJj VP*{hKN5)UY=Om\dwb& }<.zzRnO9NY.jQB/g,tȗ=@UNӅf1E5ZpaHR.sr[qw^MIu1`+C{_KZ **ky/M@Hj.48lKy'Af' +ǩs9v5aT~P|_纟j&Z$5Xr~ J],^G+,j`]BV`zdlA$(omBY懛Hh͹*%j}Oevb Znq.0o?U:dCMY޼:蔶إ}*tU:1 1Fx-mϳ4q%LRW iD rP%52, U)dZȶ X~lA%͞_ eqq@Ȟ}sCrh KE+moN)(Tma%))nrlA"A#HIShmW'ꮤ;et#Khkm (\ Ni J.*@ERUʚ",ߴ~ d[0"u_^eJhZvmBPU}J"e%m+`'{ꊵ`Pt. qQ~6@rae帬[F_Rj6;"Q;-v}nej׍64,()v,f1a{{VBI0ԋ "Om_I~I( zdpd#r3zDQn甲/';N6T,\L\e0]Hqqȸ`G)Om= 8ّ$(Vx#b(~}Ά.&YG|laS cX@OW^|9݁!-L>Mf4p",*- ~UJWe. 0TXdSpq,@F^%=ZoeKB)pjuVtG!اz =W}&IbZ̫/5gUS<)5њXLUu T!*1upN9B^hn<m˔NY1bKq?-tZ]?jROl ܘ'#Iɼ-jƜ^ xqgNo_>>N4r>.35#^qQrA_9oﺃGraea=x#T+,(#F:d_~㓲(%2vch;T?Lwl_mY}aט d=2F ?"Uɡ~$UV|R=Y7xV2FqޓAn4n'EeBn`Uc5̸?-F3P,WAJ){HGq/'MP"w k'yvS$gr:PPv[qñ89T+ʾGlH5ˡqB)/W>4GAP? 1h 1F*=wtAĭlr%NAZ-Hye=BH"nl!GלoT>j9ܜQD.\l̓ N^Ћa98Ĝ1POm΢0/+kck73w9͍r2sVE_m'S!ۧ:ޏ]NU9BA$1˹.Qkhgf\ŷ}g}{3EvAe08u搲ncD>;_RFH5Fi4 @)4 *hu_mM)6Yv7d')̚ ͔*K'dgI;o^(K(5yɪjOiQGȢPbAiʝ1 x9OkLոP2{^XԎXPM%uYTGu>}*њ{^.st4k_X&uC.?Ib'ĝpÉ0娘[dO0Э2kJ$_B<}ꎙ/{t%+l;G{(_JwEѩaD!C Lob(tUY QVtaXSZۛN5.+ m/mz]oZ0  ;@NqvFܟ2}'J95Nl.zeZlQ,=c9d^CH3F)1fzoA)~w3qnܐQc>Bm-۰zy[> \rĭr#CT歾; 9ȒHD8\N:ܑC8b3G;ןCdfTV`g:|Uh# zp7PHZ"j0à,X !k ׼^U.RyYA+(GxD *.~`AE'BaW/UB0(Kأ/?Q"ZIk.a4,^| 3ȋw8^ 1FݲCGwn3i%EIƉf@V+#\bXhZ8 #h{;G|eaz&9gI>+-5p}f:N֜&XG~r%!2, 9tMokM#.^3Bdsq #C^q`MI"Mb0D}9z|&8 3wқ)8"{2 Df*7}㡰yslE-oz"&.'&>HgҒc51gJ D|ZF~Z13/=B_|&3q$q鄟Kk5~Z $aѕDNo2 uB,*;Us(xnLVw$w1L krFR8.}gJZ>Vkkvgq`u5uBF?c/!=@գY@!"IsJ8mRb3 R׽fbJֲڮ Y=JJ$?_߀L4}k0w/ * f볔2%yFZmami4P.O-jV|jKQfKeB5#1.@R{ (Ä1~ 1( nL.K`L[La}H}$桞={-^[>H"doB5=bgr?b{JՇ[kK;229eY01V-8?ݓ3 >@:`[/šh< bW7l펡D1`|h1[YCj@H0[W\(.R;A )"(p(ze+T T܇A}hQ&z~c1w{d+y*%ؾh@ݕ )TL.;ͶNdzA%ci pQ3ZHO/CR5V211ԎXPM%uYTGWX.jG2aarÆԤ=z4PDLnBz'Q1鈠x7C+ZI]~ĘL [7x*v=rӀ9՝+wY*57)@#AXܼ]_'2r#`[: h*)ZؠZb* isP'~-;^][2{ko[ ڜ+:ev{:r.dR|QNqAO )O˂/ؒNGGF>Gze'SC6H7"`{^W'1n;*nݚOꖜ=2Fr en(ɘ"isi%0 h3(@}"890{~5ar{ddqh}.Nyy=2n-Ab%'K{B"gU6*dzJlQ(z3Ƙ7AUx0?3Pcl9QUŀjw'G-To_GȮH-T7tUɛkIt+8tBR'd ] ,N'hի%lNo9d x0 c|o/&)~s]q\4#%{30!LgNLOOM/Մ-= wsxÞoYg>qEbOZz~F!6$%Wsq *i˗VZUEKrrc5$|cm=&0F6P"Rpj3}Z/|+Ȭ`|5۪n"Jx sHWY0 'D Ȃ;r!46Iee:k#l$2 Fly̷<_AS&ͲXS{vIK~AH(3moe0`"Y/Ŗ˨ٽ-wX,(A1@L%Rx,ިH&]yXO}$BF}t/-,ޘ-_X1`JiXRrbZh֢0Ms1_щ0pJlo0}Zi,_F\\s ?%hvR{?j g8!D$YD+c "Rm] 3&` S8Ehq$vXTQNfd*~mk^rCkϷ]y.=>Ɓ=GB"e>g{F @vK~X`8|H.BFć=v T58b3D^]]aՍݚP9 |cI %5]ϗ?_5*7oL+>KN0D)jpt|{90`2-7t6, g0Q{p6ç.f~vzȟ),r&9g;r:8:Okvgs[1S\2#4liO ~\ߌaQ`0ۙ[~-vsk} {Q6Jvot]nʜ1iCyoZluQBs;?RP[,Q( H%0_w$eX,; ᇥCv% sqZ-M")̰GǗl(1Qyn_ BiTYz-˧u(Z,TdlH^L_/-0nb do56{;Ԕ3pM&k9~DKqɨGjm4pMM/Mұw|3)M<U @O s5jA xJ(Ai-zN}gO0¨COp;j5@?_Uc{r6>BZ9źOHzIm5GYϲ_O>Yc"f{ܡhHE 6Jd%Èk`!"à <)j`[ ,rJ]fj @=~ZO w̓ѵ|X` @C&aiaD50v9'* ?pڢ!~;9g(J[8 ι$,$`L Q 5Qpùt0 Jz"| aOpF=b&~0㈕t}WzuL*iftg 6%j`!4/<'ӜC{͍|PN˚-;A"jj!DJ}gW)# + FI? IT]{ ?%ZG&gP&n/f6`a|cdQˎ{_eœ509mG;븤jAJ?g2R _儤bTLOFq~q:?OKcl[P0?+Hs&X<:_v9"n0P/.#;t3"J{F' ,BU`w0+Ph |fǕ/2D_5'6lpxR Y~K"y2ù/7 5atT ױr4kpYT9f(Fc* XgN'ԓPGbtP}kTV sEP6yۚ3Ʒ?o8}X8a XB*=`-(=.1-tgyބP: ;ҡH R ݹؚS*29P fbъ~<-37Ѫ}d,E7W5WNhz5,D6F7aMɍIs/4VBh 벅 ءٱhE|z>50qvqܫvΧT>6<8$YAwVPbG{pqu7ɮ썲hkKL4+EZuLfYʢߝԪǤ&Kq|S~Aa83QEۧg`` 5`1Fꙣ<ؑ2H/sa2v8z;JrqhO۬3=ILb+0[3k( %Kߡwkat?>:͝ 4Mof!2 8aрb*&bqb1%bgן?ppK] ,  f|[ݥǷ}jQy6x.l669h˘*e~ AXt9w~}/,phL8p?E-ԝQx]l73Wh \SNZK;Xj؏GYQiyeq^eQ}"E0aQX! !bAc.!  
l5F#{[ௗY-yjK~ڒնU0bN#,Z-&B"\s-Q İ&2 -m_51O0jok98hu^>5yoח(7 뚐>m@Fq0;¿Nƿ.F$ um$&*:0YlB.cW3nOR6z|=WU4)h0^6=IF c+o4RH8r&}۰,03J 7ͱH Ҭg)^ŴPɬ=?9ُɯ,Û4o/פyWaNy/bHs%=c"Ԣ _ E@2'#Z?xh `nc⬁Ԩdˉ0;;9(yQ[7Q&PꐣHc*ӱnF Rr/3 T879ӎC}0n!gKjj7gK tI$[q*=&BrݎmO# wU40JŬ̧d8ڼS3ϻ:`T[nipieA(]]-FBygyFzLgwWOk~'jmC48G, ŕk2}=xoGw9^_#֑n>n{%xLEbGuR'd\9դfC/Cn: fg:)ÿh94ְ)1@r3VtȑtY FZ̋xXUSI{Ǘd< -r>zؘ7ms/-N?][k:>B6xI<ȴaLmr:n8y MSyj$S)9j=Ҝv޽2": ^WKQj<!ߞMtډr gy~fcvJZFFIIOڌgEwpD]K)@ }M]~dj=Bc2j;kZu,dN7&KR0kNnOWhA za0&(cKA#ͭ)Zk;ytqO/Q*@5 d^PIv_t09{?_'}~!p,j0_}FVx(nzTh̙4F0QG 9Э? d_:y$U #? /ӶpAWLȚ:Njψd(su6K !8ӾR2%ݮ~_G툾13)l13PФ&LcܑYĤb e͕xi&(~hh#vsUJ2qPL[TP7)qiFa};ʬ)kL*ǻL׬1[;3U.v-ӕGWsr{ 2eGM7LX6`UNIb;LEپy 7Q {8I ۩3 ƺ)s޵Њ{e ԠBGgKPBy:fCgRtVC t۪D-2Y"ld`,eM2,<0BSE,XkPb;xp&+:C5gj.:M 6y(ZPDm^'2}<`[ȡٍn7qm԰W[˴1N%C9m7!Ӻc`:5tt;t8wPȊ}㏠hop >y1zʣ;j(`t?麡aúvH nDUOsܑ4#{TNj]_>^`tV"фl\v\EoخINz+Yc}.cqF0ݓz0>VDhk ]JFAL$P63\ؗr 8n3Q#y47_7'HnX㴸dtGnT<7}9*ѽ3F3U0:[Aq< -|t_$]WR' VPrZɵyz%e)'7ԡlN%W׺HZPK)<JNΥ"*9LA$S=}:Oum:~fW{?N>)ks<3v\(ˌ)Q *6Sb/*Vz\ NJw}.qTcitf8ÿs'|$hSBaf juO5 wnB<5MΉ3 P3QDL s𛨁ua)s1 9*efpLno~ǧ۽?k&.~:l I[*F/7翵xNUW`U,GWe+Њ8mZyCЈ%BU$XKRW)9QjY3s|$tV]΢WŦB~ٷ@G,W(?f5;/>\>]~3#yʊؗtS!8fwSfw%}H6ӵoETg f-үUQgC!)sm捤0ŕ,#T޺A8yt5[6 8*DR`?>L=:E=I_n^H 'IuVټwsc4ʠg\2":<DӋEIxdfwNbWH&tQəގEx|k1Qh5*He55&L%FaVVɞ󈙛l,հIt1[{L*L 걘T=9b0߷)5V]$]z¥.K84nVh9% L L*&VRYFoX>IhnbR5b==_$02Rz8ƦhAd780IX kpxDu>7J -:mKouUb)VcQB掣=8Bup;BNY )k;rz&ȥ/ 0TS*]Bu63 ,s-DkaBg:uz"SհqV GgYo?_>.{Y諬-1]ÿs*චVx< k"TM^H"O1??:F䓝YD;.hCDT<&@NK4n#Sew2}N#/^J%+<>]"sF6JN ]Leĕ KbJLUuU-P.8Ҕxa+=LUPyj4 ;ݙJ;9*D:Loe+qŭ}Ri%9GXBѻJ;޹JJ[$MRU<Н<;$HOF-m&ռ(M;l;[T8aZd=% &4JPKg7.W:NgS0)x+Zh(ڝ{FU{E>]tE@-)m۳NJν MeY)iaES>isV9_w),+*B5.+Nd6,!hKqkbSR +Тy v (dE/~'~Ah9N憉©u^W79w*s,S' ,D1ћ1ͼv[hrrF L"łhl&#ŸwwpQV#JJn˸XL<˵/0GgVdT-^]4XV/#`<$[VIi\0#v1+@%Pkƕcw2omVL<_{e296KQ)_":ЀZ՛WE>gGWL_ׯ Syc-Vh\E9z8Zͥ>a ixQĤ)5x P5KTaڏH } : Ӻ=0j4Ѱ!dખscos7zܾ;f)5. 
$ۣ= $(s{JE~ (O[m`5ۣr{ZFuQFS:3j3qN`z&{R`4uްтԺLv2vUx?0`4Q\:ݥ຺t\K)}@R< &U,SI%W+i9ٜa,f] 3d` j]Rz?x о k9%'meza,KL{Rbj44Od|KMr)DM 6to&^¥P]`u(+AGMW!&>QMz*0(F %p^bA + 29jjP[{?%Ż,HT ,W8ZK` oY> Y|7T_F/ZqMN$ P\WH!V}FoXN7C"ځ y.lѻCqMcI" q~c5癿Fu6ZS-spɂR 90U-U9[8lA&>0)hqti4$hS˜ ^F:i{s6eiݯN*Ɣe|}oF&dߙi&^&0Byꛔ3M!g'M7*"̘ܳhqJa'Vh+PSiag;8Fq1H= DQQΥ :$CrеM n:4/b5ן@I4zs- JHߛ!%ˑk3gP*D3G2ZrK9[kߴ2Jz ٻ޸n$W &u!i2;` ؗJZ"ۺZ-Hv,usX,V}UdG'cZHnuNzNNmSֱ9E VԽS^\)С8.6PkF1k.yT7B=1ޏO/޶ouM?ͮZ5 Pr}({.m |@dʕ L> q7vd?ojla"nT/̠ԅļ=otHjq#bЖ+I 3ҝ>%8aa, =vC`|bWuw9Y2Vt]WD7hp"2tLq"\ nǬ%e ;疽xW`4 B2&tm}mJX_m^rWْt^Z5tT=sk=ժX{T,٪ۏ'ġpד@E#Dj:!}8RqUCBP--UL ٚ݋'Q^dH(n53gtnhV=9¾ۑl>ѨM/kٚ#&DWB%@Ū{kjj Ao XM69kIv )h:m%6t$=G7UIP!-1Ytt4f(QբPw$j rC7WWǐbQ0l: PE#=AjeۤDIg`S5>$`wyX8 x<2jhG}?[ڸ8|?_KخE5y{ruvz!Jׯ}N +~ԅy/V5洕Y~y1⛵72­x] G s(C߼VY3BIvqrWs\L\/65sB?<𹺬.(F@ddv"򺅙-hafF 3l"k C {ܑY pC_t*ZL%vWuhkFs]3Sd< r\o) 5;tSV-h=IQU [YQQ!]w#ra1:;?:붒ΩIv_tNuuu;z8}~m{ľ)PXB6oO.pF9#NvocB =4NXw=OPn+ iCkG,5Pm{_xX$va T[aV0:QcڝuFcˎB5a#tANEqb{r,6Ab$礮D/49<'cOn;mqs#̕ cӓg -}>+oư,Hc[凜 ng2ՠ) AebyC<:]C.aֻgpH$ q(+}F"A}?\E.BWvumF$aŷȶgޗb[|IK=^ժWNv^s4y}kn|r'D{JtF1 ndF D!)'{lV&nǝLF dÇs Q.I6_-/ONˬ%Gwv|#!GTMX_gaXEN\Q\7$ 3 9CQEnyq'#b] Y<~W&T݅ϵ-pY JCmi~[Kb?@^mlP]xũvI {fv<h= J[Dd@x:mѸnjČ-XM}SKj-D\2sm<6jdY1O׎h:6P.PK!FX[l<Q{Hͱ2ĞS4ƻD5V\1BeO_8 R68Zkņ|.t Rl]c6Đ)U3SSk%X.X@X4GNP>3h6z#笡(v+MlqMhI%&1JL#T%ImG{K.6dscϦ&js($&Yf%ȿlBtjs2G1* )Mj1G\ w51)Uk1bW!eR jz9Օa si]5QՄ  fzecA4)->a\`kWYS1|R5*݉XiģB~l/Yn$/8ylխSGWK"лkɌJ&3*̨d2Lڟ9k4ZO˜IIo5`%\z<ŒКuZ:"fFp>\z(u;#Ro.fR)xvGQ!<(V;)>sbnR w`N6XhY8E`.hI䫆"DWDR0̊ -Ksy)̶[5TAX0~#3ĸ{{hn\ea v4ѡ-G/[A'x%>;ݠ5CjhNs֕;srΥ ?f}rn֏a}xegͽ+?[\ fjT8MnjT~1wO$פ<}1|.GȜQ5nn`:M-] !'b5h!\ DmЇVJz *O/~T] F!8Ǚq <UQT)U5:+g=ض=H|Ȑ< L~ D+]<ßȫrq>>V5pCy"ߜ͘$p-'_TTMctiMmMS e Cs8SIGeX#?pxu>"rhW)9Ǯ(sC'Lwmq9W6=ߘǍ}ȫ6%-홯_ff5iA6S*$ 8HYC)yV ''PZdz f9Cיx9t> ^Ӣph[0EONiJ[%&6?#9 -x8Sn}HyIgplA3p %[DcRU[T;N秂npMlOW28<_L(zˡYdtLW[]G.SNKc}fDxƐ'#αIi}"`0܂AC@8iKHފ/.Hͳ޷~9.( [?UUA#Obͥz2UN&Y "딂 AgWZyPgNvXJSllcXRF4S! +tF$aO o=.:j#P26tzHYt\% l^7a \n,6 1jg.ϡXL)0W WjP- &՗%VqQ3&^͂䑢.7(ě<#d}jbR 1{uAj*!zqElc!D h˨VsaEe>MS7PI(h[BtEM>!.ơ]~:BVfdj ‹l[1-{E/5Z Z!i5nrbvu2'0 33f^g̼Θy!3*vP \Ԣ.*.>MΧf;%;h6c$"ب7 ÀI}Tn*tb$&F+A^Ś6ք1|d H~'c*lFY-䝌a'cXoBJoa*!;D}%:{%c/a\<H\Dx*OmwM9E>]j񰅅M M! 5*L/ Di|~ӲBjpYz* ai\"i Қ;A>&1pܤP᙭sX(yH*_K^ґׇB\(Zd8 .ƔRN =Ի;*©fJ]_t|Y{ &2J gHoWsw&+JK9S*D BG85l8CR!j|~&Mto@jn\ݯ?|t|^K1TqR ~6{ 󔆳Lh12U4* 4B[8,Μ/YJ$XAy09On78 1'p.AvqqHDzw/;u(Ne4Tck =uڽg,<å~5TS[KLBަ2/2G3 &w9 JUGC߻ T4OX9OXYQVĈs+>&pIy '>w`Zݟ?c'ЫMuPi* |n3Vu@0T-/ *J+k⼮/@һ0^}\(U4 NsYWNFBdHf뉄KT HVImM ֢eYz#PR7r6iÝ ^S@l?]=_^T-V,ZQwzfKnb%e=WO-NɫF\5u!Msgݗk kJ#0gR?6 J hjS7<jf~;_G7N$RǹHFޢϮ+K2g~[n;lnMw87|I[$V_W]W?ɑ@0 1?"Xfp45̪f4{YwZ 9{Eѡ(ca1S>Pcnmn3}I\mB~qMW?TmI&EJ~aՔ>>cMzj醪V~ qV ¤PHqcS;CݍTEptzLy:s)"6KOn"6vB"\>N (2嘷(S`yVg><S^?XTl:]zbJbJYw#Xnb!O$җLahbߚIkhKZPy\2 p$Va ?1t]zhv͢3O%P-vx" NHﯟgc'p&U5h+`̅2;r Ro@ xT0%1@A7K^M$ )`[A>&0A A ~M&T#tp[T1F`][B 6c׍a`J4qo%4:s$h<=6QR±dLdl,ߙ>7?U9c衷V&qJv;R4*\o˓M2o8p]+?F_I/D#7K^A,Yd6O>3Mq'ȅk#& #r=&.ߝ\T{0A]}q]KUGjrvEfs zC[0ӜAuͯ4jhL±8Bb}3V%\01AJ2Դ퀄aoJ%fx`dH6[to|Cg7.S|?Wu2NZ[ʭE0[; >[?Ylxǧu˿ܲ)HOTU[h+/IxzgQӿ߇^%r,Yl?naCueY.O9*L{LGjJ[JJP:C'[^ed{a;ґ#Z`{`pcCo%(YcBѮl~U3nvL+v|P-CkJ%;.N9nv'ۄvYa:` 6 (ڡN> kޡWt;IwhR S2R\0s@lCxO~H!hg6O<i6$`v;;mCvD;oD-{v\"v(9nv-֡n#%h61qK6OnvM}5NZ/vhwmJӂ܆nv-ƴCo6o EVC1nGvY>t)ZSUw8`ѽm|~8ӳNTΦރƓ<K^ ?}0Bs9 @b TwBxイRI!GB{$к ?¾GBf$H(3fQPC J'hħta;En`&T] 9awcҭIe\_G Ssۧ >r y 3=;O¯^¡LFӗɱ8w}L4{yiY@Cg!ml0KN>?;`:klLބ|j&TTslQ$FlGsm jڠ>HjH!U\s.[Ė_rVQ\#Y*lQρz=~ՈH+ثLMՉrGg^#Ө m@nW;|LY%(.<ĮaABkGT;':bԶHV/I=x@/p0{o!cG>!..Įt? {X%kH w]JXf l``/.Kh4fcVZ. %(o.,=WD;UǿOS{+K4q*r;puV$Ҹ)1BFiOY}&J=k:0rUdG2I(]Ms9+M H>{1~$mu{/Ϭ,Zm2  ٹV{c3Ux 8{Rw|wsXP"o
d";lYNplp) apRU4 5VUUTdI@@Rb:Ĩ8,* DBTWB+MÃF !yb#1.8s7XJ6w^m& XoZ^,^>[:1@T̓D^ˁS',,S̾^~4e"7s^#͡L(Rh_2BPAPX L+ܠR5|p^H1˞ 5;8;Lu˴n7{^rf4S=y?a>a|CgM;YF{]׿aR|q!%CʡVS>~K,>f( 2N(Ę 4\q{BXk\982H e%!b$uuN^9v>P;.w_/j(E| [j=oX3-09+تin+)kYBQ1CP"p;HL .p5ojim`G/GjklS-  lՃCTojDHn0f 6, ?r[1xgT6?,y%CzODPc_16Fa\d{Ik<66ߟ n ;ZJ<[$[Y^S&ޒc8:[f`*KQeV١9P#Y J}ÕL>؛?q I ǽMFg"3TGպѕ-sRaR Uߔ>&lJP)*C9yVV?Mt~ (:dlPrs,nUVpBةAV<:*l\yT,M;G $+yԭ'$·?Z:EYs/GJdˏJahYn9b/*463w|cϗ8*wTM;B^u*)ajИ7)gL2ģۿ.}=4LǗ5Z0=NIu B j8טuA,9e8CPj1-PTnU IT؝j-ϡuDЍu<|ef>]^q^|[&1ZCѢIű6j̞ t;M݃ҬUNJNZ5@5`VĕmbͲ"n/$xhʺ3竕Yg3wĂU\gx=1މBk>zÏX. j6-D3r]t6`{{syG@9ksLӜ3{.cγ$z:Yp'ʩә}bkVS:DV~jE*BԸwVAdBcs|<|{o[|l7)]c4^CE9Eq)A5ثX$=lx5 e<0*)J̐TiK=h/N4͟W5^d73wXxI= /$>E4Uz[%S]6hBYαA(p((G>_>gYOi[i%*F52(vXǝ\@Z Sp8ĦUT;-6Rʺ9J nh8<(G`~kp=5߂ӗziqnfqCgTsQ]:3ȸ2 ANsFxP'fK5IZQosj6`bMOEGPOH'rK~V:3ml>uwfx{ndwD5*^3F}\:u;}:9zl^[c:Q` :/=wI.3o J>`TPTΒ4x4G j/^]?4hZgiϊc!d2P0aX;~VkejV*hpFU(@Zۖx2G M4.Q2ۑ@Gfy匎,T$grCCݴ)hF4>pb}\&MYnZn֣nm`eoKュ Nw*ns.sșeOOyfqY ~pMS*x߳G9n^Ez~]Suo"$D)+[zԄDI;ѽ 7I:;Ì]=:[{M3gnn Vi=-Km&^LՑ>,y=4:qj :GAnXϭn`'rZ@a6r=iRK^I!YTG&qI rʤgt@0>9] [mJoOcS=!;j|55cR"Vp9y!'9{rFEm8лavI[; (K:'Ogn{}boev d%3> CSF Nff|XjZ3 'BdߨQSw- rxwg@-.8zXzR> ى>Pob01A|9)'{ƲO Uz9ۡ~*}N"D=rO?gؑ;mC^ Nzh#tՀO {Ճ 7)LQ nTު]8idJŒFJ$Xϲd͖~c1lc+^ S7_)W8ĸ99Mɝ {O-vF02USV@9eY--1#lb^/k-nvۡO8PBz [9Gn+rΰP-FlS;UXb^Ml8vڽ=jGv7΁ ;%{JPG [9RoLxwB^"÷>1{pYܲȒ[ݓ'~<)ԡ/OD"M a}>tʻV??w+DۧEчn'3/&w?AEuK|. ~J)f_?TzmEһw=ߩ5f`<^;>g,M79aua0R678 )ScY.; q4\nv|Wo+P۾xAAQ73_~?|#]l5Ucф[a[{ =ƹV(F. Xb瑂Bv!%HJd"%S=L??LUu܂ՇF7k5~3aVZba$nP1hv*{ $gKR U]9 oԢ?QͺZEf +xB_df,0dVփ 7]6i+["9!U7D5ê@.$H>hMZ'6&9Ui!θYQgT+`O*Ff|(%ʢ*24KN˹Cjѿ{1'ߚǕug ??vь{ :3M{Ŗ2 .e0!A1ϙAͱ)mk:Xhꙡ@GfD_"QlE6({[ŪO#Rpʉ"] XH-&jP?3hs@PtPzPgz]Hdj$Bv6΢SxY+9=>LNѫb,TSqx)g67:V5!uvNO+9N8o&tcژ5UyuJ\ik:rhڙYWz-O2p*ٗRJ\\zخ+J@`y(RbBmUoS*6SV7:D Dau;AN\5n眰A8Nߪ$&чNo`W^vqfT%o3bܼfK.6;wb9a7 @Rmu4lAE(Z" M^uᖄܛlv Q.ۡc_/$]ꈎt*BqsD|{5S粤+tKکH7D?|v XB힄>"s TLmS%[oy c;|۳&8mO<,y5ye46g۸\;#5A'NT2>i^rdnSM.~hx͇wCÑr89/y/Q:{˛*XUXk[;fq>^[L.~0`%omK.9A G.Yts݀%fT̴dɽ*oMdAí%d&-٥1@ %Ap.-٥'/L|)Nys^9P)ۄ9BXBMjS𦨝>Blɼ/! f=eYyl-nK-l)b0@`$X_=X* ]BvU7XBvxZռc j*՜⒁%;!hʷڱzdcV @VcjdaJN5W}YSfۦk灵>&T_9 @v˓y*(x 7" O1XbϘiSj@«kTJUCvȎ0A; GaIJU U2e}&hB 9C6j#Nڕ:Ok&NZQ]Bv5dBvMBvOCHa.9t`1]BvM;:dW箝nF× X5y|dnc8?9E##AASK2YRp36!+ƨNQ [+ k2Ж c)9.kLW(I\lV\:qZZ]E>Brznxwc?>e~NvQE=<7akln']MEkQ$ek2^4K]O,DiUa+X=~]AK/ǖ_Xb? 
K΍tnl.'uMr9Hc- :76Q}pr'w׿+ʎa !-M=2n]LުGF@ zdnE;ɍ1 Zn%vU!)٨MyKPdnlᇬd?t6|0 7|HE# +ӕSF!U976ԥM2Wb7{*T&h&Hq3.bŭʲ/q54 T:ETe;m*NEU hxPeRYrqв[]/o=T_{wH$]xj2Y,gxA4Af@de5١׎Ty_m_!lg4{,O\sEȈPN,愃DAY15yG^VǗCLh 湻)p2p;_H2&YKaVw6#»ėK$$SP"U4SSQSҐ/#ՠ#&WmZ?tg[S37y=`q:׽~ߙ^O׿'[6 9 O8'맱T1GxhѭJ6>}tul>S63y.gʢug4%L 8F`q{2?VSZSExUsDQvA8C},C-V;  4LˣH wߟ ւ+MOv[Ɨ;ea4o͞@L(GqC/6;9 ּnR_)~rc26Q(Z??bWdt?D4pwͦ>f3-),͜fJH\i8u黋Y, fIuPa^ %pbbd -ouw7w{ѝ磑Fb>hA:>6`歛8mWik2b{z^6KzLURP"=@./ֺ"@7e 9'fBztu3D9 `*mGODx2zR yCsCt9G8-\FLxgSBҸnfYhvn[n 4T+PRސRiF"d4 :~'1D>駱D^o"K5')6H,]fgIN5t: 1[B1v2XD6]yKd9ȔK̭kp 9 0ri6QH9+\Ӆ)@\S-.>meھU TLG3 -v5~OG@ PhME2]=DHyzaOcPa\ 0ĘDյXqyIVi}q|Q{+}|U oP@&Oˑ@zܯvq6^nkm~gI{F+?3䘷@fd23XLO[,y)ٹIJ$+DG" DR꺺xoO>765T\(Hzi~}ttf_[NVGV4 8ї(̍~RM?cTCb˜iI)cpEQiW?_֍_-w؋#Voޕ)CBv]U5YcȾ[pkP5$=;B0ZTn'VO.kE[MvE[X- d_e3nr2a4\,p\7Q?~ʤT>sk[LCuv4=yN+]>._.EXPw3kȝ_M1dp6 8 W6nf׬-jo|f/ߝ|/cF, wi~Ӆrv]fd)f}#$7إb:i1]9pSڦTnb"#o&NK ^_*ى3Xcv5WS8w$DܸPҘݞ JN6Jհ&VJ~U_iN>k] ~0v~ߜW{:~8.!!(9`i=}\\{6H9hoB,v?3UYNNq[G ɒ[T'l'?^_\GSK>U1Xڀv9bW 4#^?7-57FqyKgI.^ f=wHJ~2l1U$۞cz_X5jMeL!Ef4%nES0%߮(ǝ'[;桌IH:X!M\RGM,Eo|1b`wrޠ\>n"sY [r$Vk#Ss'>AyHugrOS`;{*g< Ms{;hFXf6*Y4PV{ddPikoKo(ȗl޲VCKprsri8i}h?w=kj)mOtÖ &uL]f] >fVEo|DQby6C]75ytC Ss{5ِB{؊2RU5 [3/Ɗc?J*}o!M $+A#!XEa!:aʧʱ(%%SiaRNZ%zۧl&07V¯)rjcoV'pRE`43 ^G2mɀC4 |֣`8 0ƕF.p N;J *T*_i'*>Ə>T\DR[8=?!o;B`(* UOu;H?>)eeuos0rmS 2ČX:ذLʈ]@'*`rG+"jè  ŀͨ@7˅ގРѬ8<7uB56սO8\)b1GT:%& AJX$L+j4jڠ+і }Q%7Q8!)x(`$B5Ɍ6gR"'K%rT"'K%rz#ud%N` V3ɜ2҂ıԖ`XGu};Ga͉Ͳ'5u56[b^p^mYԨa(5s޶\StC;gc0JwH:Ӡ1 8"z&$ը@|`.KJk2%SCm a1jDF,A`+m 4"+G}!_?OG% W~">>|^2b.8eLx! 0^QHZ%P$&cu>}.f^hg^Ӣb/ WOYLuC2ODn:p}<= ?9>v~[˼w $rO^.Y]  vdEdW=5&JQGpVQ?8ș~%wo5^\A_ Gu` _u!gü~cH_RHOeXWP$3χ'BLW\\֘M9f2yeMpկ+zYeWTꝕG]i2Q)L{t<PgZTaoڦ15 ( e7TBuU+kcpMYdʞo ׋ γ;itjE%ZK۴޿ L+ Hݙ~z[{}*i$ik ֲvV7FwsQHrfb09770?~{sxoᵂE sIO^RsG]D쾉9(o$ŶciDrDQ{?RrO盄>[[ j=>'Qk0Z`atD<eW1FKt9v<,؝w)6O?Hի/FɇoK_GY4AgX%ɥL@"EH'hTFƲ_b)#>P`1Uws3yD(mcC/dAWdv,_ʔQ8c̑3BMRQ&5gGev:i1<}AĦ꜏ )t;*ތk $B]W1>c|e$&LݝRef L K[(WQr;5;*Tژ_d۞ŹfZ݇OI%gY4bu82pa)IeZyqhV5l 8MY'{DoݠOޯQʶVqZhA_F&Vɨ]Y|l1=-ĵۭQOi?lN:ZQD.Psn'4Dw9K$R!-<ۍMEfչ$'J5Ϫ;g g&Ȕ_ͱ ,ZĬ|@Y5GQ>bmW N.f48 }wS3LZ_oaNf?/O}:*hct{G2(Ăr3/d@LK3Q$B}ۯ5q8e4^n0F7ېcT W-` h,ީR"5OugZaRZBTSplP_ U̔C̙j.w]~͑MgNSHJ%!F,UmLʌf> jɉLk=֭٨M^Fbh"tYV;r \^~FY1&C"|w"?2Ig򆢰5ќ*+:PB:#r,FrZ^4F(nNwk\ avMOWβCժ_ %vQ"&MhȓA~Xy#MwiS]x65757 ҈P SCjB]+9Ģ(u b=5fS&ytѷndϝeص 10j|B"cvƧ6hփo!u34/Aja9k_F-&#8 9A5Aߦ/U }TX$Ms*FVbpy4Zi5K w;}Bj7=V.ηTjRFV(TK]GGO Dڀ"nIaihTp(-K䑎VhZaQ)"w*?IU~IƹxFJ *_ւ^1D F9#xX-1Gzj9QEچ;.sC񉙋klƸ6e>KgI|j D =s^rĨ@lt"pDl3EJ 4 %1Ǚ0 Es7FmC,nOݩb =Xo~)4n$ Tq09nK.jC6vR3qM⼬S*dRKQ˖-5d7a2Dr [ЄRr'WKk)8Z@1;`0\Q i jW05A*j)cO?Y-%ٍY/yi?.qj7Bw!\Ӿ/yЌ i_WĿ)tYuV:MɡVgDHfukNC#"54q6 ~3w=6%+?6ƵgB;dהC'bcWY$;Ofy .Y2bšl] _{.܂TٯnqrJ+,rD4-rb2@A'>iӛnEq:?NCp:v:XxoŤ :yc?R"A*uמiٰ>s籤t>eB5OHS^NcC,rp#{"l;sHaSETvkEm-aC\]=V͜ =!$ZePHk 09Z+ΌW 1ECKn`^ڐ74vfRek2Dіo<{1C̱ "'$IY" (y,m`Vj8 =A0CK; h({O((a7;8w^P#Ϥʏnxp "pb}"Jaצt+]'J5ZBT v3Z+wj+(ŗ; :WЙtVY 4w:o++i$N6iEDYAdVhjT؏_*MCœY\ZXyX2s͡ZnqQQ\LDti^o$j X_E 3 A{}̨J-sNSkmwxx=pfzSe-H f>*LXl慒z7oQݹJiP=?wvd'5B۬h(AayN-չA(n KR,!WKYhَ PBGF/gBC4E g B UP;UZ%W DKXTs0"ra8ӌVj\XB0ȣ0PSbβ峌ݥLX)p. %9F:gXj#50Lܻ9=c-՘ts5//'wp+^n2h_Aw)Max!ĚcJ)..DTrsL_^E`6XE!/#~w]@j?rYPcJ2&'i~v=r. =(Jhnw^ˀŏ 2pS$nI N44͔:R:XO)u0K\oI w85^N`A|V wF{. 
õLzFG31@VzF&WD$7Q9׶!N[=K3z1w?U KFFC`(9H*pB wgμμ{LP V0 FRJc,i`9f*,l^+r(b9L~(*e"W*R)2肰kAqtAd(TES]8v1.:Vk_\;-ZH 9֜EHY U4z(90}!v#é:5#6cA֛L+bǗc4ϖbTv5O^ s  -sFsR炕(K^#':"W 'P.#QE+"Ǡ(~}ucjtPI7ȯ=ZRׇ["`V%*Iu|NPRIxQVSd[td[B.Sd[r%^S^U+[/#*ml[IF59_N"ۺl[ B +\@.S@ f'/Ovm<2#<#pA@@$X4°ki8MCEij%Mv36W,9Q?K!kp.\O@X@ݷiG)TՁ' 栆5/!/<'[RÅ-̈́&L RJA;񓯅4VAΑ(9\.{k8~+AѳAR|86Tj{0k#81hjpqgNc #VyJ+îu]|lZ$6NU*'S'+;Y;&>B W0_*1010D—Ch ]D,97*=|@D#!49Q]a,~l=fVh04[d`bAEv4NIvId$;%i[d@f~n0\F\Qkdy5[w}ޙ[w|%$$<Cxb%) OIxj!,i&8.',I2p 3zv[VZ 2S#Ffbeft$K07 x1-B Y qK%e[ZSxdw UzʫIPVhlZb/8(=> NB^\]yFbѹ[ff>sAS9=(s SJΤ"CFe9Jw|`nF`X H#4ň¤oҮPZT Sj,9w0r Ts_M E+L@&]hw+ mȜR. VL+򰶬MFQwYu0E!5vkJ4ci2ZƅІ\J( ^iRCərKp˖d0@n}Sn9rK[(`J#wPFcՔBW;J<_[bM14/6bo̷ wוMJt׃Ԕ.}MkS)#PPZGAtPP*e8fS f]s!*27N\[+oV|o^UІ %&v+PnlHD^ A֏{`T]ԊiSg3Jr^f r%TDk77 霚\@6}ԣת%b cr.'(Q [W; k!KrK1q']H_:$2/ͨӌn7#>wѿ̌G03T{ nn\(ӛ2([=G/xѷ_0Mq[''K.ZVf }h*6 0aL `%Z:ŐX=zsPzV~O/tzԋv@Йɹqt06e^ZT2J um;3 jt@@W(2($c V"n7wԹmfAM5 "hw8J$nCXZK#ͤC!kwyI1"Ⱥ ﺊ]EV'V]w}N;̧L+7wߜɵYg:h:Ǔiar~l=uߞ񥯪1hiWC_Nb@UkPW-E@.>on.7baF5=mrbY`^~S‰Aλ>(4{khݶC +xдc_?`UyB Y4-m߽wrns4>3x(P{xd: #_}9~f/5➌P OVOŧU=ٯG I2z"Hwq6w)D.m~,n.:PQ=\6J [g&}Hٱ]8}61("2 dIMf4C$m.j2`W*VVꀌ5ѵbEy\C 6[ ^0i򀤵גۮ%\`y8Vrph[ǔMOS]G4@>w݇fah>c mA8-OH$_P?'WpH<8r-|&I[I=Ie!UJXd YI6)^Zhl?n6k)`1!$P]dhZD]fIIRA_t |iQ1=2#Jn,iw($TZ1}+`z*s0eu.# _VcYL4[,-@9/?]moƲ+?Ӛ\=(nz$i60V"K(w$J,R$%Erw晝AʨPQfxscI╱M\ի7i^$E+Æ-vc6?etX˫߮h_ Ej;RMiY_ќX*bM3>7 qzC-f9d!:TiΌCL Y+hc'+19b,ֻm<H 2`ӡhl=)m:?yc: `һntRrϴ$F)DbϾb"KJ!Bv2AC>nR jc̑@-;lqFjњaIƹʧ 6 oDȍ9HFox@Wѥ*}{+Ŝ0"α Av  PR(HY`aJQ&e5 k*nc)j/>ѭ6lA6b&T(rg10QS a##hp? +SI\u[a^oy_LʹMzVa%ZYԺ;0n\w,.:pPt YDI׳w_褿LHD_gnH \O{./ KEry;Y0J$IYNo:y]x)P>F|7T'^ LT]P(,T;93/|E snݲ÷Y}[Cꃫ}|ͮGhsBqDn;ct~Pp2Fe_Ed%q³3v c4ۍ?xu&7No8qɋ{3 =Ke8<Ȅ"./Ֆkm„W>v?5xk EAߩP* ^0֞L.&Ifgrgs&Fr;K M1뫒^D;9`GF(`&Y%|j9"Bot%JaCeXa3pUeiꂴ(FO!Z0+-o/blC /oD {*$ݡH#`Ph̋|<@AFsP#&}U `7:3ogVw.V)=c!מJzT`eCxdx5,r厎/42*8F<p,ҏDTԎ@<{C崓0c{Xxг&В"%PuEi~泟/Kp>:pS*=W}xġTԮ?sҨ%n8]9̦reԱ9wrrxA gD"k> d>$ٜI#'~<]pѰVa8>>'~û2 r hib`"G"" lVP .tFDƌtrϺLD2 %26a(!s/^ C֪XI*k:DL-;T-UݙJ0Us "bd`'n=v0g̝[;>`9C \[$M&F݃V٢2Yt0#gobRvXV=ӱsV/w( ]:㼫wO>}wuVoJt4vJ!qW ᧋*̗ha,؞~2 rCBc$KOt ]-c$湭6?(wdݜpLWف9lɺ|<2馿[h;.-phc_aaɉĞg8n40&wvЅCka6N%D0D;e8zah.vޏd|[!M|W{2LC:2;fSʗ$ze'rݕK:@6GFfsl|w<S(,3,OYl8zM {zr?:Oϯoz~{s'$ ?fwӫ{ +D2쓓غds`^P"_޽ktv=?|=M?wC9OR?u#=@4H.e#6t׫#KknY\ڃs܏X4ҟ``d<]K$)RWcWmrO߯ᢛ-:|ɷv|$-9Weо?,~O~|>??0dV.|G~ J/BoNz={ Pb{ֆ)W{GR5<vq/[&H&끱ׯ/P>pҐ:kKz|x_5Iqx1_\oĽϳWWzܓ} eYi) XMnֺU$ٿ¹4v9LY4Dw&}z % 1miPy6nI_[4&B)xdݑ9I :յSmT<Fc5S+ <:7O[hQaR_ZQëQn-VaLasUVi {Ï<{qE{".AEr 箇䋝,,kUm¹6r&-nu5UwFyD:fg 6,ǜ6)Bl^ݼi>@6Ѡ)Mx+r &pSG)R|EBвІ ^)TQ>dAM2G1ڂGo0هjqy@T2qY )ŕ 7<:ܚl)IMf,MʟaV]x izU$*UDj7sj HXQD6#T7Kg*Щ՘Hŏe2L+ɺ' n?3I^󥽐½~] DcK3ؘ㣱T龔wa1q }XQe {mS0x#X2 4Cx*E7'u␛%y,ѬfbNjOYz7Ln;^i{8@C&a>mA38 ^X˽? Do{}\E3'čsB8'čsBBX*V 3!󱏰o"zc]5jPUY5")*R*-)bJz8_pVMΓ^.[[JiPy)}5қ ~ㅐ(iDpqUhayB?T 0E 0j[CDJ''YXrTu%hqM(6\^^vGiN\j*Z͕` `gH"xkLA9?T)&DzSDzSDzS;. 9::*UvJ e4V)=c!מJzKj#2閝*3usIe-;RF٬oNv@sTB@b&sqJn9<蓷h9^9M0JҐyo(|.JtĤ겆9n Ss1߬5? XÛ4g ͓]sMӏP$P(Cy%TJja(WV1.ա4bAp[&foܯ,x! 䯳̳Y$ SI,b$),撳0#M yQdȀ&4A1R,MMu5ʷ*R3"` ' o Z!X5Х*3Vb"" (;XH6$h 8҂ޡ2!5h(0ʵ/*q5)$֢UgvĩyN ̧g-i 01;4 nbނ7& t?_g?,~\/{н]D__ǓLQ& FXxɿB_ò$UEfw\7S0*Ow/~šaJ\ Nh̘+$Y.^Pաsɧvpzo} b4N;atiw$Y{y (kF|7@2H$i'QavGp68:xxǬ?D&6T(5 kknH=sJ/R[vj06E*?3E$( )fnt'9EdBT81 smCǨd,,Us ʁ9 ܛG}Do&pϳO{Z V&BN$VT0x9UN >LO <>c@6ԍ3k: qJvB'v9V:v <'2ZipmI؊ yibCQKǛRg]O)Ks*1)! 
$ya]/ Zs;jTk!K8$u?=Ap4 &5 %B@MEbw:Px]`@dwUzBmb}uN9dt{ĨC$ ‰[xD81BYwAX9ʋE.4yD3a0~fG5`$!1 z pχv7M[ MĀ wmX' 퍸 0N 3; j7K5/]} %v~`%;TZ?ό-G:R p(n]S':G1S  _Rɪ!{XhLC,/v?sc)ȶ l.x|-n'%f㻡E: XۘSgq+^wx ŏ@J8~9 1!kZ^>A8At&Xd3Br왑Gxo֜yij :%K>R1 E:2H7CkmsqbUnY${ap2Pa;85@)aƿ_$]2f6B} A wbȖ i!1-`kkzKH"jj/drAIP{AH0"@-Vb#v~0 (mi} \6,kFܽ^SZh JJ!ȋ l̸>J0 >;<";}awV;,ٞCeDI,,DT%8˕k;ptKL8[;uN0F72N*癁{{BEKW:=pP, <+0Ȏ` ÿĮߞwj^?r[7ٛ+.ī|1 |'ܴ̳g\?4[x^>h|MHӹ:}ǤeV8 :d$*BE ?ae,V`S"VE~ WOORHf$ZIqD%%e0 H')4Aq@^f %.L? *%s0pV&&̮]zOzs ҲibTqk#؟b006|0ɫ*,v0Yܫ|f%+xsgh~j`~ ayE0c竿M-dx/fdtb^wkַ%O}}$)sЬiJ\BII$3ޙ/y9X/F|A{d.F+4j|_ƔBރ@YM~bg캾z RJߧؐ7yX*L#PwjcIGZkL)0qS4.~](,/ߦGhc>QȒeVc-(?Yf;ޫxaIlvLC~P[9~" K0k,"@8 QpZ&#-N^崹QF7%W ѝ &#r-RSEtJX Ub*@P[@9ÎĽ-"wBe@tW+޹}zџy }#űyS>.8eIBR %#W;aFy(^a=ȯ`7%2y7 1T&B `5Pi-,={S}ƂK$DX&>!!AB-J`oQ[BlZ V&BN} [ISa` " , A+5a_+ZZ L1 oE$1թ1I(H.ӥ#+XB߾绮 W]WF ¨NZ {VcֆPH8oNܖ >)Zy*cV0I*p`l,1Hv5lc@+~[ Hf AZY}(TxF3yrpg>7ίat7&B`:f S8uC^ AΝ1V/)|ڣݦR/uL[=ϐ8Ւ%AND>yBthu.06qlIxH8xg. xhZai Q!sS TvݞI[Bi=1֏!phRC3fe£WxRS*q2v4:3 =SbKAwEY|wV =[/=DPJ["O6fI[{ZA7,齝.R썒0i*^R5I  AQ %Z4uR$Fr h"07kjӺPf'1`uغٞ15FMmFF᯼ c 0!Y\9_ f k[kxQ=W̒zo%Cwn^tgsW԰ޡPf9U&ӝ o_9 H(8nUB)́qڹX`{})/'H$@_8Nl2|zPӑA: nYp΂pς"|jVc0"3':5ZiA0 gR{ Yf,10ѴuTvݦv0@eRߩwSgֹM@Io)yDT%8˕x_ ۑUA.{h̰r"]_daf7aFY>%=7;|$!!4slF5b,#ZdX3\iY^$H#F;J2f d\,%c 9zفIXafKpNr囑zO\!e}"7iy,8@+oÏ|`g >}Eo=ᣟ"g~-"~d\eNߙ,%b<ׇ%yKlHx Z>3tGs%{Є"ry aɢg~ 6%h"H2cMrV=9e\3 Y~3?Lo/̂9k$ y\,*?~!}ٿCQ/?Va2dd2o`6gʪI!bvWs'3IfMg]gV0\4_JJ)VhQ-ɻK>֔X*MMD)A:C:Px%WR]?JYGEWd|K-/zX1κ) W +7/(<#)9vT} lEjfa gT;[ Ϥ7 7OAYo"EfRliw⒚~_+"8~$B+FB/J9m .B_ĻI!F)Ц^ӆfLwF?yVNaIBZLL¿ǔ rvL ['Y" ”*vD` P{ Ֆnݷ+drڋ!A(a IH*#+$b0T#!@@q,l _C/wC2dr.&GBgFp6[d,xJ~&efW|pM&s)SL@x&+ˬNZx?8+,,ި ˮ^[GyʇP*AsywL{c[2bP4 AN. u%`Afuެwjyp ?H蒃eڨdisЬH҃!~:7pcp!聝ʹ#B46v6^ֱ{ -]<27`s@<+s:^Y|WQtYtfpUnGO r|ߟSݹ{xZoy='[1M >v_C3 NsSktm96{gKOby!L^2Y׭sbninf\tVw9:7xf_>{E:b>T;J~=~KW zNӽހ{ѡ |$|`Oi. az{C`zC3 y0j\|e3KJ Щ>2x"O|IʶFg'A$/~ SM9\/(^ƫpHçhA B5`AP+,(ıhaI ©fǠh hu4(T [Ari3,X" X_ oozB \Y w_B@/ A >SIX8A T$1~ELVU`bJƌvߦ sسlEoz;;W^cau 7W^NVsټsAt|13 a9zcհcEa\Io|ָ& VٖU"4XGbd$aL"i=.(pもL]r\? Ŗ 8%5H}7e FVT뵎T˧/[*SpaD}#,XKXۈ0ا:R-TB6j#JmiZP<[Rk5fPj9riQ1إjc.^? rYЭH$0r4Pk]_v =U>iVs-o>j !.~3o جȸ{e4۞sb?]F뙞);z񃶻jz3Y$uG -)Ñ#N礯;_ZHC2ջ\:FIE?Į!iyɛ<,wPōV"H/q~@}sDy7秨@R:싫m$;Z{,ХZNzۛm$; JhX⏑}5.DDoj#X:s[ -$>$ؿC\Ho!xXlcs2c-v?. &n Le+#҄DhEb秖6jVPlg'3E-{eI%ɆlVg<7B" m^E㉂^'^؜tgɀj8Vzu8ř )Y&ώPѓt69ύjx+j,cߵQsM4F1:U-ZKeGB]-oJ0F)ReNg~Ɖ#m- tchc d u.6'[|4bG%S'(PT[GҾ?X?Hh F#,Al]Ѽyv : nѿWIGwSs0ݔCRIX;'ȣ'<7fwq|rt2ķAНOIAjt9uڼg1k11.oRvݏyVkR? 
ekϕ%cH_ auA&K+{O$I{N.!!-xqn>GqZ7+M"9IA"`Ƒ!(&8E%4 Y=mbr~ZJŹJi P+d b̨"Q b%R3R!Bl(PJV& aEVwJZ]yV/|b@ny\}=_gv\ 6m9k?Rs?饯qSD!՛.Od>}:Ӷ m@`/`L>L {W x}a?Qww?tu#s|epưm=Bs,%aL\Lku+ɹK5O iWhpn\&h,fqgI!6iXbYP$GmEE-.8gTo!{^E1GJjc {L#|4*"&@,0GXQ,R#$%vA`"#RP]ġⴎ͟h~]6MVԝ {RmtˁUЙsepo17}] d9fmV ɚ FjqÎ<.$:^[v'6 i1.ģw{P16G"5IDQC/6DjB'g҉A]%RKC-X!-6ΟIen¥8Niȑ/8mO$(hF1*oPTNᇞ0yF ܏]S\*{>%!(|%0G ׫Bv_EH4e4BvSL_o*gQ?'cǓe638]o(@QL#Ȱ(tbPBAJo]Nŏ^1 Xq0T"c)MS$p%U|Q.5{T@G*%`s뱔iUIvhȽqUG ;<cD#B#:(P38c92Ұ*Ra$!&I¤;Ҕ6(ihC 1"86ҠdLlmJn*7+8 &xfKy ȁk07pE_3/ $r0ʹEk*cXV ˊ%B ĤJTD`j8 HI\if fLM,PV1Ma+19clSI0QX WDjb?̬0a$Eʿч5Hf> JˇmfNҍn> 4͌R,EvJ[Xz1F(F&!K #1-BEE8(yԡ'7AENs0(吢yWdP3٩S;MR'z4:|#}T@7p5!+!ۿj\sAHtJCd;J=r Av[+ ̥Z(NJ* dC|N qBݺy^Ʊ!z"O=P>Cdu|:d,o<\E*X٪Gdlnp9e>ot^oy='۱u:њ?;wrzxlap:ZlΏv` ڄ+ mO^H3vmԡ$NǡIj8Csr{&L/u"̇s(yOwbu7soF91ht禮=0*L>T#SzK Kι9W^TͶ1Dh*$  1f20& V ixn|^}TA#sykI"As=fԥ;E=zR]-/O4ohyfͻR#J|):3k@PpNA!LJAA>ti#8 G%@r7\҄q)F`LJ6&Ba}JD%RۊJPk(zJY|.׳{Oz !teF@ǯnԃ@we:r3hV@R4&&3#cUX EJ,ME4KH%i;/4RZF潦HJ>u3$~{a#0u \BljW`%W>_xK"JIdbM"(h0&)K0.Es5ť(:i>m qɭ&6h97JH\rupJn!;>sf'_FD-ht-gzV\n͔ԛ';z񃶦{gv/KILHѦ=f A Q_,Tp&{`IO(--X0k[>W`qBvVŗjn#u'A~*\,>zN ;$-.?qKIgj!RPu :v@!ۍ|$nBYAdm^ѡv[$]n;ϸpVln2W.]l2BBpf!<7$'] s.ߢ\T0:iWK%wj_.);/33R2$ YJ"bQ HO7]EֶK\^YfOhDZPDf )TH"d2cNϊ. U׹ǁKt2JgŴq`NU%L9[0D)P|@;w$]W+txNo?=c0Po64Ixs`0Ŋ^|?p<ŋ=\|? cg֞SeJ0* VuT"-:j QAAr~Q66z#h8kt=COG?O0Ⱥbt*>O eFY)dk^$&j4.$ ;UhѬ 黌Gdo{򔒊0 7[u~w ~+WecV;蕷J6:IhI{YJza4)9?ۯ~+o_X(fV ##3C5QMtB8*=6460t 9DyJrjiue諀=%>!Ta[l|R0@m{tJa\ߍRGT"Z^>]edD5댟&F8x~, 6lr ȁ\@ψ!߂@>˝y醝d]XPc:d_={>m撦]eBQZ#`uAgBUL|o^?:k$h+Y3ҿ /wf4~+N?gI}-^klvshh"Y!|S6ӧ"*ڐ0z.kۉdPee'X+T PIR8HB5c:1GHaYA, @2|i?mjV+֔ 8wH‰纥-*rV2 ,OwXnJSαp9%ieE4J\!9/yt! E,>h іTc#nZ%N8,9s,9r ^sLoMbk+#[}*ʱ^}lOXnbs_qv=@U(/A%,"RaeS-1"-R9 (=;*ۋ.C}Y;"\xG Ğ𨍚5@1vW , 0Ԕ>4j D F. ]+uVćFm{jEL!A B:;I)$ uevѣ8ɨJt!rƑ. 'BǃV!7q!F:ﭾZcebD{jX0WkXR 8ӼŵZt#'q fiu nhb׀f:k^m6C,yV}qA8IBv q/h)ᬉ17֪`1OHXX&Ք)yp[) Wp=lmU5*2rZ8v[d"*g`cvƱ}A5syz]}F uR")a4V 0pI#lč2,=U8j#DV{$_2X&W1V$J'/I„26((,IaML "_R&Ʊ "##I#`-a%t4"N(V&Ţ6e7GRA{p`@, 7l{[ElQ"8Bo(vqZzc3 9667=bb?aW#P+>c&36@[FPb됃 {;ZJ;L[:hMnI0rә^U~H,;G5BUSI̮GJ9G>6ꞬC旯^"(#bWN'Ԏ`9-* i_<;&xZW՝o椲iBC`QqRMU@u;Mu"v]pI'Fg4ݕdحw|+@G])kO{R7kFOjf;  njmJ8ϦVLcXE:Fb C:v3"GJc-+]iB J7S8]6˛ŧagry|?7L2*F]|y,`TŅ,߁jONP-"5;GG2hvvL1 1"犞cE ۞=g@EBOi׳RqyvcӾ< 0u$ U&]}D/kWk3mmflgt^n~ѷ_k՟H{3M7 ?#`l#nuQiuO \|>R}m6#xɇro;y<_@o0,ڞѷ&PȎ@D]t%%>gjy?6fE ;nQn>L\p8m%s3~XgYTy[)`JN3m=+$H9V,0k1\KSB`SH"ᄋN'sȨ(fƑ?b,l "3~ԷC/G* QK1mSx`6n_ 0k/ݟș!kCc);G)ӆ Oèc=8h<۪'Aj},xoz֏tS%glnoRGRf g{+2+o `|VpnՊA{AL'E3gO/r_.w m~x/1'mI#<5Ûۙ[}Ԟ~:6kts{K_Jo\F7ƾ@Msیmoۉ\3-(4_MQ?0){ ?YB|IXb'YNO' .Kyl,&NTߨgg{bo(zӛ|;?bY!D~e?n1<l5dn3Jki؟ 8&(QO@tܿZdM amE8od_|2%?Lx ydF{Υ8K=ߛ+J̨ʂ*>43/RsdkK|ҟD]MC]||R-m8> fyF̢3;fl!d-`Cv9ˈ2C_JƲ׍o |< cxZ{ #O`}5j ^9_Kꮌ ?8o ZbBɻN/{|gFVONdT Vt]ū"̬c H(P,Pl CM%ߺ,ՁD,_qH30B ̈=̃;dGzYX_r|i,xy,cT?"Z^-j]!q]0;RI%%.:ޣd-jTݣdoj iO}HExz=@dkS]u_ &&{e"B)GK'!!*!DVbT.ޫ ;W;e@" ر1;1*f;;9Mﯣj4=@Ps;<+vΠHJ^4BLlk߲q!ţzܾ UJV+͓k##[jdyqD)hdb$)" p@fkJӊWRK4DnIf{+>;G0^Sw?mwھrw30xWu REn|5._dܬGBq Bc5CHLP脶QE~LaA7LhYV|"$SdEa$Qn<(51Qv;:clPBKn5H7.!2 XnLcn<(#:mTnGyL؛v &vCBq )8WGByPFtBۨL -ꐐo\DdJ Yn-eD'*p[B*]k`Bj:$ѭ@B?,$'V?Ix̳v1UO'h8֜9^i"*y$D+pA(-N})d>$ qv:$\`D 'WlyuA?O?syFz+"o6cyDӒ$=LgI{6+"Txu  10 ,Hv˭ˢ=UԢ$J"YlCݔzWջ~eò{Z{66Q3=̘T" *R8c%"$(Sǻ>V7ǃnEAT9)G#GsJηcR4'+ߢ#‹ |h[WEdVҔ$ҼTdPD<y̍!tb,5Jbވmn!R2m%'3@nz1;nL¶_ 6:/b(rCΡAΡ̧Ca9 9(.$2 D,"H<q1[.QL"A#hlQəft1vǝ')GB:Z\Mt_Pkm`_o.2Ҥx<ƓMg"4QȰӈl1V,sHiH4dXep$Β8O_ybaڠ/OvQ3墳̝bB75m_k&;/O{o~2G}+a}l=-lRtX 'dp$%{Tg`r%Fx xYxa 91 -i DsYƑ6fd$cn 5  '&*DF$L,&J ,d2,2fCJ4IULT }E:6jt'M*2Z[ӵŪ%k^[j`ܙr$yWknT.T~CjfpDPf8+zV *^PDbVYc!#PfY XAe:Q $[es˵P_j;A.͕/F<-s꺄[wI Nǝ1zg#1|XNxn*T)yQkUi!d)0њjCT`*,qdފ|LB^+cnge]NIBfDb%N9}XTai3悤 
h""9\Ji)+#njAV3Zq@{_oG#gYoTf%σ|rCw[ Wm%kŚNw4_UBy:<7ǿ GY%pUcr໙5/<U%WĘqgXHjX{% Iϒ{O8>LJT6 8KK@8M(PliT0{1 %0 KfTQ;l*PJ1ʁJwI=\7'5Jw1Kz!,,O(=FrG_s_c玾41N(=j pCFڑ$irihğξjnYk ̊CqۍSWoA++{:ʱ6Ǚ܌>uN&.]O`6e]mWMQMV?[{c h %&a^&iPbB5GQ 5 ;'u:QbFW\X#I Ҥ⬓qҤBGI3΋s8iyq ٭3/K(E-O1J<~+/v(^DVҮM*ds]:n'M $ϦmeB.BbB]FnȺT0.J/mLoovOn:Wl* Rig":JYLy2Zm+ry@v[e*0ǨZK-Jth8WQ ð)LU.T@sDjP qJ6|'@Ru5QvA pf*1:3[C& & AS1# 1+@=uIGʁ0!bPLK)KtS\0$vY8Z"DFI°ڈcŽ(0Y\p3 pQ}wg Pg^5_ǖـK*1.˷lxv2 ${&0yFYq;}t(|awҲXQ k#W1mKh$e8~oP_`[ci <a~e6rotF{ziglH= -K ) @9\PWJXF7k]ۃ%Z]{h[vŵ`,mHU#[jT[q[3GNw!}S45)ޑv:qTrTe^[:bp"y6h]S5OFj!\jrH5s랑j.\Mq\&Ӈ[*ZZH:buWQl⼾Ku8?:h⼚Ea pBm۳^*Vݰ֍0碌4Nj,7 DghUU@pOerv*l0 eM~}oH ݯ1%N7D*AC\.ISSK֭E\T_jMA˃w$ udQ*?v/NƽV|jc${DŲG6j:5f v&C c M~[f/>#vm<$%fÙЦҞp/%גY &󇩭rOd^0e~vB^8D0&n yW.}Fat6ժ5!ݚ@ Dqnz7Q )л:Ϩݺ\E0Yoѻ5џ˫zvar/home/core/zuul-output/logs/kubelet.log0000644000000000000000003661751215136710233017711 0ustar rootrootJan 29 16:26:11 crc systemd[1]: Starting Kubernetes Kubelet... Jan 29 16:26:11 crc restorecon[4680]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c225,c458 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c272,c818 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 
29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 29 16:26:11 crc 
restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 
Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc 
restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 29 16:26:11 
crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc 
restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:11 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 
16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc 
restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 
29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 
Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c318,c553 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc 
restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc 
restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 
16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc 
restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 29 16:26:12 crc restorecon[4680]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 29 16:26:13 crc kubenswrapper[4685]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 29 16:26:13 crc kubenswrapper[4685]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 29 16:26:13 crc kubenswrapper[4685]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 29 16:26:13 crc kubenswrapper[4685]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
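Note on the "Flag --x has been deprecated, ..." lines above and immediately below: their wording matches what spf13/pflag, the flag library Kubernetes components build on, prints to stderr when a flag that has been marked deprecated is still passed on the command line. The short Go sketch below reproduces that behaviour in isolation; the flag-set name "kubelet-sketch" and the single registered flag are illustrative stand-ins, not the kubelet's actual flag registration code.

package main

import (
	"fmt"
	"os"

	"github.com/spf13/pflag"
)

func main() {
	// A throwaway flag set standing in for the kubelet's real one.
	fs := pflag.NewFlagSet("kubelet-sketch", pflag.ExitOnError)
	endpoint := fs.String("container-runtime-endpoint", "", "CRI socket to connect to")

	// Marking the flag deprecated is what makes pflag print the
	// "Flag --container-runtime-endpoint has been deprecated, ..." line
	// (to stderr) whenever the flag is actually supplied at parse time.
	_ = fs.MarkDeprecated("container-runtime-endpoint",
		"This parameter should be set via the config file specified by the Kubelet's --config flag.")

	_ = fs.Parse(os.Args[1:])
	fmt.Println("container-runtime-endpoint:", *endpoint)
}

Running the sketch with --container-runtime-endpoint=/var/run/crio/crio.sock prints the deprecation notice first and then the program's own output, which is the same pattern visible in this log before the kubelet continues starting up.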
Jan 29 16:26:13 crc kubenswrapper[4685]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 29 16:26:13 crc kubenswrapper[4685]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.323921 4685 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.332967 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333016 4685 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333026 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333035 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333045 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333054 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333063 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333073 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333081 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333089 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333097 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333105 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333113 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333120 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333129 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333136 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333144 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333152 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333160 4685 feature_gate.go:330] unrecognized feature gate: Example Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333167 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333175 4685 feature_gate.go:330] unrecognized feature gate: 
ChunkSizeMiB Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333183 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333190 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333198 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333206 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333213 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333221 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333229 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333237 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333244 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333251 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333260 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333269 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333280 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333291 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333302 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333311 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333321 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333330 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333338 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333346 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333354 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333363 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333370 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333378 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333387 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333420 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333429 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333437 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333448 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333457 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333468 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333478 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333487 4685 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333496 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333503 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333513 4685 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333522 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333529 4685 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333537 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333547 4685 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333557 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333567 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333576 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333584 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333593 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333601 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333610 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333618 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333626 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.333634 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333807 4685 flags.go:64] FLAG: --address="0.0.0.0" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333831 4685 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333850 4685 flags.go:64] FLAG: --anonymous-auth="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333863 4685 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333877 4685 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333888 4685 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333903 4685 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 29 16:26:13 crc 
kubenswrapper[4685]: I0129 16:26:13.333917 4685 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333929 4685 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333941 4685 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333953 4685 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333968 4685 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333981 4685 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.333993 4685 flags.go:64] FLAG: --cgroup-root="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334002 4685 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334012 4685 flags.go:64] FLAG: --client-ca-file="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334022 4685 flags.go:64] FLAG: --cloud-config="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334032 4685 flags.go:64] FLAG: --cloud-provider="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334042 4685 flags.go:64] FLAG: --cluster-dns="[]" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334054 4685 flags.go:64] FLAG: --cluster-domain="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334063 4685 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334073 4685 flags.go:64] FLAG: --config-dir="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334082 4685 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334092 4685 flags.go:64] FLAG: --container-log-max-files="5" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334103 4685 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334112 4685 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334122 4685 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334131 4685 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334141 4685 flags.go:64] FLAG: --contention-profiling="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334150 4685 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334162 4685 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334171 4685 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334180 4685 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334191 4685 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334200 4685 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334209 4685 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334218 4685 flags.go:64] 
FLAG: --enable-load-reader="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334227 4685 flags.go:64] FLAG: --enable-server="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334236 4685 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334248 4685 flags.go:64] FLAG: --event-burst="100" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334257 4685 flags.go:64] FLAG: --event-qps="50" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334266 4685 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334275 4685 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334284 4685 flags.go:64] FLAG: --eviction-hard="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334295 4685 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334304 4685 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334313 4685 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334323 4685 flags.go:64] FLAG: --eviction-soft="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334332 4685 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334341 4685 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334350 4685 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334359 4685 flags.go:64] FLAG: --experimental-mounter-path="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334368 4685 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334376 4685 flags.go:64] FLAG: --fail-swap-on="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334385 4685 flags.go:64] FLAG: --feature-gates="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334427 4685 flags.go:64] FLAG: --file-check-frequency="20s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334437 4685 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334447 4685 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334456 4685 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334466 4685 flags.go:64] FLAG: --healthz-port="10248" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334476 4685 flags.go:64] FLAG: --help="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334485 4685 flags.go:64] FLAG: --hostname-override="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334494 4685 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334503 4685 flags.go:64] FLAG: --http-check-frequency="20s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334512 4685 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334521 4685 flags.go:64] FLAG: --image-credential-provider-config="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334530 4685 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 29 
16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334539 4685 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334548 4685 flags.go:64] FLAG: --image-service-endpoint="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334557 4685 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334566 4685 flags.go:64] FLAG: --kube-api-burst="100" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334575 4685 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334584 4685 flags.go:64] FLAG: --kube-api-qps="50" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334593 4685 flags.go:64] FLAG: --kube-reserved="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334602 4685 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334611 4685 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334621 4685 flags.go:64] FLAG: --kubelet-cgroups="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334629 4685 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334640 4685 flags.go:64] FLAG: --lock-file="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334650 4685 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334659 4685 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334668 4685 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334682 4685 flags.go:64] FLAG: --log-json-split-stream="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334692 4685 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334702 4685 flags.go:64] FLAG: --log-text-split-stream="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334711 4685 flags.go:64] FLAG: --logging-format="text" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334721 4685 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334730 4685 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334739 4685 flags.go:64] FLAG: --manifest-url="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334751 4685 flags.go:64] FLAG: --manifest-url-header="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334765 4685 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334776 4685 flags.go:64] FLAG: --max-open-files="1000000" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334790 4685 flags.go:64] FLAG: --max-pods="110" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334801 4685 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334813 4685 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334824 4685 flags.go:64] FLAG: --memory-manager-policy="None" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334835 4685 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 29 16:26:13 
crc kubenswrapper[4685]: I0129 16:26:13.334844 4685 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334853 4685 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334862 4685 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334892 4685 flags.go:64] FLAG: --node-status-max-images="50" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334901 4685 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334910 4685 flags.go:64] FLAG: --oom-score-adj="-999" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334920 4685 flags.go:64] FLAG: --pod-cidr="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334928 4685 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334940 4685 flags.go:64] FLAG: --pod-manifest-path="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334950 4685 flags.go:64] FLAG: --pod-max-pids="-1" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334959 4685 flags.go:64] FLAG: --pods-per-core="0" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334971 4685 flags.go:64] FLAG: --port="10250" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334982 4685 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.334994 4685 flags.go:64] FLAG: --provider-id="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335006 4685 flags.go:64] FLAG: --qos-reserved="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335016 4685 flags.go:64] FLAG: --read-only-port="10255" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335025 4685 flags.go:64] FLAG: --register-node="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335034 4685 flags.go:64] FLAG: --register-schedulable="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335044 4685 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335059 4685 flags.go:64] FLAG: --registry-burst="10" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335068 4685 flags.go:64] FLAG: --registry-qps="5" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335077 4685 flags.go:64] FLAG: --reserved-cpus="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335088 4685 flags.go:64] FLAG: --reserved-memory="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335101 4685 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335123 4685 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335142 4685 flags.go:64] FLAG: --rotate-certificates="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335155 4685 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335166 4685 flags.go:64] FLAG: --runonce="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335178 4685 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 29 16:26:13 crc kubenswrapper[4685]: 
I0129 16:26:13.335191 4685 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335205 4685 flags.go:64] FLAG: --seccomp-default="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335217 4685 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335227 4685 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335240 4685 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335252 4685 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335264 4685 flags.go:64] FLAG: --storage-driver-password="root" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335277 4685 flags.go:64] FLAG: --storage-driver-secure="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335288 4685 flags.go:64] FLAG: --storage-driver-table="stats" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335300 4685 flags.go:64] FLAG: --storage-driver-user="root" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335310 4685 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335322 4685 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335332 4685 flags.go:64] FLAG: --system-cgroups="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335342 4685 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335361 4685 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335370 4685 flags.go:64] FLAG: --tls-cert-file="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335380 4685 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335427 4685 flags.go:64] FLAG: --tls-min-version="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335437 4685 flags.go:64] FLAG: --tls-private-key-file="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335446 4685 flags.go:64] FLAG: --topology-manager-policy="none" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335455 4685 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335464 4685 flags.go:64] FLAG: --topology-manager-scope="container" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335474 4685 flags.go:64] FLAG: --v="2" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335487 4685 flags.go:64] FLAG: --version="false" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335498 4685 flags.go:64] FLAG: --vmodule="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335508 4685 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.335518 4685 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335734 4685 feature_gate.go:330] unrecognized feature gate: Example Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335746 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335757 4685 feature_gate.go:330] unrecognized feature gate: 
SetEIPForNLBIngressController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335766 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335774 4685 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335782 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335791 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335799 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335807 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335815 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335823 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335831 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335839 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335847 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335855 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335863 4685 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335871 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335879 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335887 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335896 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335904 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335914 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335922 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335931 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335939 4685 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335947 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335956 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335964 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 
16:26:13.335972 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335980 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335988 4685 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.335996 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336004 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336012 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336020 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336028 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336037 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336044 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336059 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336067 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336078 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336088 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336098 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336106 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336115 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336125 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336134 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336145 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336157 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336168 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336179 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336192 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336204 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336218 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336226 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336234 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336243 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336252 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336260 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336269 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336279 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336287 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336296 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336305 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336316 4685 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336325 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336334 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336342 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336351 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336359 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.336368 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.336418 4685 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.348292 4685 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.348374 4685 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348543 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348594 4685 
feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348610 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348620 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348631 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348639 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348649 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348657 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348665 4685 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348675 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348684 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348691 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348699 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348707 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348715 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348722 4685 feature_gate.go:330] unrecognized feature gate: Example Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348730 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348738 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348746 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348754 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348763 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348772 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348779 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348787 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348795 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348803 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348811 4685 
feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348819 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348827 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348834 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348842 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348850 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348858 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348871 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348882 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348891 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348899 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348907 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348917 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348926 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348935 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348944 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348952 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348960 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348968 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348976 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348983 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348991 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.348999 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349006 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349017 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349027 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349035 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349044 4685 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349052 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349059 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349067 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349075 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349083 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349091 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349099 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349109 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349118 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349127 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349136 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349145 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349153 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349161 4685 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349169 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349177 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349185 4685 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.349200 4685 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349483 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349496 4685 
feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349506 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349514 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349523 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349532 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349540 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349549 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349558 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349566 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349574 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349582 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349592 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349603 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349612 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349622 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349630 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349638 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349647 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349655 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349664 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349671 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349680 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349688 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349696 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349704 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349712 4685 feature_gate.go:330] unrecognized feature gate: Example Jan 29 
16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349720 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349727 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349735 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349743 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349751 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349759 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349766 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349778 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349788 4685 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349797 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349806 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349814 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349822 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349829 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349838 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349848 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349857 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349865 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349873 4685 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349881 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349889 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349897 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349904 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349912 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349920 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349928 4685 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349935 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349943 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349952 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349960 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349968 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349976 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349984 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.349992 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350000 4685 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350008 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350016 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350027 4685 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350036 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350044 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350053 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350062 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350070 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.350078 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.350090 4685 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.350434 4685 server.go:940] "Client rotation is on, will bootstrap in background" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.357177 4685 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.357358 4685 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.359975 4685 server.go:997] "Starting client certificate rotation" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.360020 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.360223 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-25 10:23:34.854452394 +0000 UTC Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.360328 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.390964 4685 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.393228 4685 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.395928 4685 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.420234 4685 log.go:25] "Validated CRI v1 runtime API" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.468849 4685 log.go:25] "Validated CRI v1 image API" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.471672 4685 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.479197 4685 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-29-16-21-55-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.479264 4685 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.508318 4685 manager.go:217] Machine: {Timestamp:2026-01-29 16:26:13.503719237 +0000 UTC m=+0.701075579 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:ecca5e7b-6a38-45e2-ba57-5071f788384c BootID:c732ad58-05ea-4427-9404-d852b5b303c0 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 
Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:fe:4a:80 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:fe:4a:80 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:66:3f:3b Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:73:5c:64 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:25:ca:64 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:c6:db:53 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:4e:d9:08:68:58:57 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1a:d8:c9:33:55:94 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.508829 4685 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.509025 4685 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.509572 4685 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.509957 4685 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.510028 4685 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.510455 4685 topology_manager.go:138] "Creating topology manager with none policy" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.510482 4685 
container_manager_linux.go:303] "Creating device plugin manager" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.511049 4685 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.511108 4685 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.512526 4685 state_mem.go:36] "Initialized new in-memory state store" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.513259 4685 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.519047 4685 kubelet.go:418] "Attempting to sync node with API server" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.519127 4685 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.519258 4685 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.519291 4685 kubelet.go:324] "Adding apiserver pod source" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.519321 4685 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.524593 4685 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.525761 4685 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.527945 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.528171 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.528163 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.528301 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.529380 4685 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532083 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 29 16:26:13 crc 
kubenswrapper[4685]: I0129 16:26:13.532117 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532125 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532132 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532144 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532154 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532179 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532197 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532207 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532219 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532259 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.532269 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.534713 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.535681 4685 server.go:1280] "Started kubelet" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.536333 4685 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.537152 4685 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 29 16:26:13 crc systemd[1]: Started Kubernetes Kubelet. 
Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.538632 4685 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.538661 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.539584 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.539624 4685 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.539688 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 11:40:27.329070212 +0000 UTC Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.543256 4685 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.545169 4685 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.544496 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.543316 4685 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.543292 4685 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.546140 4685 factory.go:55] Registering systemd factory Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.546188 4685 factory.go:221] Registration of the systemd container factory successfully Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.547357 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.547655 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="200ms" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.548840 4685 server.go:460] "Adding debug handlers to kubelet server" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.549812 4685 factory.go:153] Registering CRI-O factory Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.549944 4685 factory.go:221] Registration of the crio container factory successfully Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.550128 4685 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: 
connect: no such file or directory Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.550257 4685 factory.go:103] Registering Raw factory Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.550410 4685 manager.go:1196] Started watching for new ooms in manager Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.556715 4685 manager.go:319] Starting recovery of all containers Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.555288 4685 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188f40633c2d2b3c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-29 16:26:13.535427388 +0000 UTC m=+0.732783650,LastTimestamp:2026-01-29 16:26:13.535427388 +0000 UTC m=+0.732783650,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569001 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569050 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569061 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569071 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569080 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569089 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569097 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569105 4685 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569114 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569123 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569131 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569140 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569148 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569158 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569166 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569176 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569219 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569230 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 
16:26:13.569240 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569252 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569263 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569275 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569289 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569301 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569313 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569324 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569342 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569355 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569369 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569402 
4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569413 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569422 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569479 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569489 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569498 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569507 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569521 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569534 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569544 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569560 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569574 4685 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569585 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569594 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569604 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569615 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569630 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569642 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569652 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569662 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569689 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569700 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569711 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569727 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569737 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569746 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569757 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569765 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569774 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569783 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569791 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569799 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569809 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569817 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569827 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569866 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569877 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569887 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569895 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569904 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569914 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569922 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569931 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569939 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569982 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.569992 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570001 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570009 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570035 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570059 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570067 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570088 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570098 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570106 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570114 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570139 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570152 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570162 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570185 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570206 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570227 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570236 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570255 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570265 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570275 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570285 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570295 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570341 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570351 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570376 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570385 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570420 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570442 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570463 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570477 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570504 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.570946 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571022 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571058 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571075 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571101 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571128 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571141 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571164 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571182 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571196 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571211 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571223 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571233 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571249 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571260 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571276 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571285 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571295 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571307 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571320 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571336 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571348 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571357 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571374 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571400 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571411 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571468 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571488 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571511 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571523 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571543 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571554 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571568 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571581 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571598 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571612 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571628 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571639 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.571654 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.573931 4685 manager.go:324] Recovery completed Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.574890 4685 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.574928 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.574952 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.574965 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.574976 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.574993 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575005 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575016 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575031 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575042 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575055 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575080 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575091 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575106 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575121 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575140 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575151 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575162 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575175 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575186 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575199 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575210 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575220 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575232 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575243 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575256 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575305 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575317 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575331 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575343 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575361 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575372 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575384 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575413 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575424 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575435 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575447 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575458 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575471 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575481 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575492 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575510 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575523 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575546 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575559 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575570 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575584 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575595 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575608 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575619 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575630 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575643 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575655 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575669 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575680 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575691 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575704 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575714 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575741 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575754 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575769 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575787 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575799 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575813 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575824 4685 reconstruct.go:97] "Volume reconstruction finished" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.575832 4685 reconciler.go:26] "Reconciler: start to sync state" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.587469 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.589486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.589519 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.589531 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.590567 4685 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.590578 4685 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.590603 4685 state_mem.go:36] "Initialized new in-memory state store" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.598142 4685 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.600298 4685 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.600360 4685 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.600438 4685 kubelet.go:2335] "Starting kubelet main sync loop" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.600518 4685 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 29 16:26:13 crc kubenswrapper[4685]: W0129 16:26:13.601573 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.601638 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.605657 4685 policy_none.go:49] "None policy: Start" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.609436 4685 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.609465 4685 state_mem.go:35] "Initializing new in-memory state store" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.645696 4685 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.674588 4685 manager.go:334] "Starting Device Plugin manager" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.674646 4685 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.674660 4685 server.go:79] "Starting device plugin registration server" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.675235 4685 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.675270 4685 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.676082 4685 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.676249 4685 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.676258 4685 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.689593 4685 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.700821 4685 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 29 16:26:13 crc kubenswrapper[4685]: 
I0129 16:26:13.700929 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.702071 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.702132 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.702143 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.702296 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.702666 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.702734 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.702995 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703024 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703034 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703125 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703437 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703521 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703693 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703728 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703739 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.703890 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704104 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704207 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704560 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704589 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704601 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704697 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704722 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704753 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704930 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704956 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.704964 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705060 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705252 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705319 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705590 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705624 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705640 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705675 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705690 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705699 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705897 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.705925 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.706212 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.706248 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.706263 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.707604 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.707633 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.707645 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.749936 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="400ms" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778278 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778278 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778461 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778490 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778511 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778530 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778553 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778672 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778783 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778847 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778873 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778903 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778931 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778954 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.778979 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.779046 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.779703 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.779740 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.779755 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.779784 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.780199 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880162 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880251 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880282 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880315 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880337 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880359 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880383 
4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880453 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880457 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880544 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880600 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880613 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880603 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880489 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880670 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880491 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 
16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880539 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880606 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880794 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880903 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.880868 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881016 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881073 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881116 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881133 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881147 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod 
\"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881156 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881180 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881203 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.881224 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.980826 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.983077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.983150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.983170 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:13 crc kubenswrapper[4685]: I0129 16:26:13.983263 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 29 16:26:13 crc kubenswrapper[4685]: E0129 16:26:13.983944 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.053580 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.063956 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.104625 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.105602 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-25390eedbf0418c81ffec8c161e02a70b48257f73738a18409ae351d0fa3d526 WatchSource:0}: Error finding container 25390eedbf0418c81ffec8c161e02a70b48257f73738a18409ae351d0fa3d526: Status 404 returned error can't find the container with id 25390eedbf0418c81ffec8c161e02a70b48257f73738a18409ae351d0fa3d526 Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.112978 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-aefe6e6972405056ab2dc02ce45c7e81ecb4a79d29578c5dacf4ea7e2e7be42b WatchSource:0}: Error finding container aefe6e6972405056ab2dc02ce45c7e81ecb4a79d29578c5dacf4ea7e2e7be42b: Status 404 returned error can't find the container with id aefe6e6972405056ab2dc02ce45c7e81ecb4a79d29578c5dacf4ea7e2e7be42b Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.125253 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-cdf58ca6af3fe9de61d5dc91da6f9d1ca833f319ea7f4fec708a89096acefe03 WatchSource:0}: Error finding container cdf58ca6af3fe9de61d5dc91da6f9d1ca833f319ea7f4fec708a89096acefe03: Status 404 returned error can't find the container with id cdf58ca6af3fe9de61d5dc91da6f9d1ca833f319ea7f4fec708a89096acefe03 Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.130590 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.137887 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.150147 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-0a9fb9f6ecfa7a77b9bbe6eb59ffe2d16f79d6864b429b344e59b390db645fca WatchSource:0}: Error finding container 0a9fb9f6ecfa7a77b9bbe6eb59ffe2d16f79d6864b429b344e59b390db645fca: Status 404 returned error can't find the container with id 0a9fb9f6ecfa7a77b9bbe6eb59ffe2d16f79d6864b429b344e59b390db645fca Jan 29 16:26:14 crc kubenswrapper[4685]: E0129 16:26:14.151116 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="800ms" Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.153360 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-e917d19da3dcbcc02793270595ca874e752a6df66cd12e61a9f02e2ef2ba951b WatchSource:0}: Error finding container e917d19da3dcbcc02793270595ca874e752a6df66cd12e61a9f02e2ef2ba951b: Status 404 returned error can't find the container with id e917d19da3dcbcc02793270595ca874e752a6df66cd12e61a9f02e2ef2ba951b Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.336770 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:14 crc kubenswrapper[4685]: E0129 16:26:14.336876 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.384994 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.386099 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.386132 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.386142 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.386162 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 29 16:26:14 crc kubenswrapper[4685]: E0129 16:26:14.386639 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.421721 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:14 crc kubenswrapper[4685]: E0129 16:26:14.421871 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.539865 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.546154 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 18:03:36.357428918 +0000 UTC Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.605539 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"aefe6e6972405056ab2dc02ce45c7e81ecb4a79d29578c5dacf4ea7e2e7be42b"} Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.606996 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e917d19da3dcbcc02793270595ca874e752a6df66cd12e61a9f02e2ef2ba951b"} Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.608471 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0a9fb9f6ecfa7a77b9bbe6eb59ffe2d16f79d6864b429b344e59b390db645fca"} Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.609556 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cdf58ca6af3fe9de61d5dc91da6f9d1ca833f319ea7f4fec708a89096acefe03"} Jan 29 16:26:14 crc kubenswrapper[4685]: I0129 16:26:14.610372 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"25390eedbf0418c81ffec8c161e02a70b48257f73738a18409ae351d0fa3d526"} Jan 29 16:26:14 crc kubenswrapper[4685]: W0129 16:26:14.706874 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:14 crc kubenswrapper[4685]: E0129 16:26:14.707062 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:14 crc kubenswrapper[4685]: E0129 16:26:14.952093 4685 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="1.6s" Jan 29 16:26:15 crc kubenswrapper[4685]: W0129 16:26:15.116504 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:15 crc kubenswrapper[4685]: E0129 16:26:15.116629 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.187254 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.189885 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.189942 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.189957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.189993 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 29 16:26:15 crc kubenswrapper[4685]: E0129 16:26:15.190662 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.416308 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 29 16:26:15 crc kubenswrapper[4685]: E0129 16:26:15.417975 4685 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.540210 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.546294 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 12:36:49.883637069 +0000 UTC Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.616636 4685 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19" exitCode=0 Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.616744 4685 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.616768 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.618183 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.618224 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.618240 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.619358 4685 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235" exitCode=0 Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.619687 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.620907 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.624301 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.624340 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.624362 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.628415 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.628490 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.628491 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.628644 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.628668 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.629752 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.629805 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.629828 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.631783 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1" exitCode=0 Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.631860 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.631894 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.633061 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.633101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.633120 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.634218 4685 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99" exitCode=0 Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.634282 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99"} Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.634329 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.635291 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.635330 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.635343 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.636858 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.638018 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.638045 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.638057 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:15 crc kubenswrapper[4685]: I0129 16:26:15.647305 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.540654 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.546923 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 11:28:45.160444427 +0000 UTC Jan 29 16:26:16 crc kubenswrapper[4685]: E0129 16:26:16.553818 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="3.2s" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.641237 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.641421 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.641441 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.641453 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.643928 4685 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6" exitCode=0 Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.643950 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.644040 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.644839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 
16:26:16.644880 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.644896 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.646947 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4a34d7de6add2aa924963aa3027487dfb44041c7b87a8a437e35bd5da275fc5b"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.647012 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.647676 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.647693 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.647701 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.650450 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.650765 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.650899 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.650984 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.651000 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007"} Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.651732 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.651765 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.651781 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.651880 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.651931 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.651942 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:16 crc kubenswrapper[4685]: W0129 16:26:16.678376 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:16 crc kubenswrapper[4685]: E0129 16:26:16.678514 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:16 crc kubenswrapper[4685]: W0129 16:26:16.708494 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:16 crc kubenswrapper[4685]: E0129 16:26:16.708612 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.791632 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.793009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.793077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.793091 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:16 crc kubenswrapper[4685]: I0129 16:26:16.793128 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 29 16:26:16 crc kubenswrapper[4685]: E0129 16:26:16.794858 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Jan 29 16:26:17 crc kubenswrapper[4685]: W0129 16:26:17.289387 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Jan 29 16:26:17 crc kubenswrapper[4685]: E0129 16:26:17.289523 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.547714 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 
2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 01:57:39.41046302 +0000 UTC Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.655807 4685 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a" exitCode=0 Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.655913 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a"} Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.656020 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.657116 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.657203 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.657268 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.659470 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"11e1b7e8eb3aad610d7a3ba6ed5026bd0ea7b777bb1a4ebf2dc8832ba45e0a45"} Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.659540 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.659545 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.659628 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.659645 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.659833 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.660634 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.660665 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.660677 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.660949 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.660991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.661008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.660987 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.661054 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.661069 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.661291 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.661374 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:17 crc kubenswrapper[4685]: I0129 16:26:17.661483 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.276537 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.548590 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 12:54:59.586467349 +0000 UTC Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667028 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e"} Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667561 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842"} Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667613 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb"} Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667061 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667645 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857"} Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667781 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78"} Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667140 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667257 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.667880 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:18 crc 
kubenswrapper[4685]: I0129 16:26:18.669572 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.669623 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.669637 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.669585 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.669949 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.669966 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.673185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.673782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:18 crc kubenswrapper[4685]: I0129 16:26:18.673813 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.549096 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 16:29:15.264841908 +0000 UTC Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.669382 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.670067 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.670091 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.670099 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.687659 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.996006 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.997480 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.997536 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.997553 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:19 crc kubenswrapper[4685]: I0129 16:26:19.997585 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 29 16:26:20 crc kubenswrapper[4685]: I0129 16:26:20.549736 4685 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 02:28:59.378760778 +0000 UTC Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.260645 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.260781 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.260844 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.261859 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.261887 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.261911 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.279336 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.345496 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.345791 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.347066 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.347108 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.347120 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.349348 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.349615 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.351118 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.351145 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.351153 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.550808 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 16:52:37.953893599 +0000 UTC Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.675286 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.675356 4685 kubelet_node_status.go:401] "Setting 
node annotation to enable volume controller attach/detach" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.676881 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.676970 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:21 crc kubenswrapper[4685]: I0129 16:26:21.676993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:22 crc kubenswrapper[4685]: I0129 16:26:22.551463 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 07:55:40.674972175 +0000 UTC Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.056194 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.056429 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.057704 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.057772 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.057789 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.552098 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 06:54:29.952115834 +0000 UTC Jan 29 16:26:23 crc kubenswrapper[4685]: E0129 16:26:23.689727 4685 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.699193 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.699374 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.700265 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.700293 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.700305 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:23 crc kubenswrapper[4685]: I0129 16:26:23.705028 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.289359 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.289678 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 
29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.290966 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.291010 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.291020 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.553070 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 18:52:57.881972583 +0000 UTC Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.681315 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.682121 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.682154 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.682166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:24 crc kubenswrapper[4685]: I0129 16:26:24.684999 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:25 crc kubenswrapper[4685]: I0129 16:26:25.484797 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:25 crc kubenswrapper[4685]: I0129 16:26:25.553930 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 22:26:38.338765671 +0000 UTC Jan 29 16:26:25 crc kubenswrapper[4685]: I0129 16:26:25.683301 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:25 crc kubenswrapper[4685]: I0129 16:26:25.684440 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:25 crc kubenswrapper[4685]: I0129 16:26:25.684483 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:25 crc kubenswrapper[4685]: I0129 16:26:25.684494 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:26 crc kubenswrapper[4685]: I0129 16:26:26.554461 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 09:26:22.13289691 +0000 UTC Jan 29 16:26:26 crc kubenswrapper[4685]: I0129 16:26:26.685751 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:26 crc kubenswrapper[4685]: I0129 16:26:26.686761 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:26 crc kubenswrapper[4685]: I0129 16:26:26.686810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:26 crc 
kubenswrapper[4685]: I0129 16:26:26.686825 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.541006 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.555716 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 04:01:41.129035818 +0000 UTC Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.587487 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:46838->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.587569 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:46838->192.168.126.11:17697: read: connection reset by peer" Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.693029 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.696221 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="11e1b7e8eb3aad610d7a3ba6ed5026bd0ea7b777bb1a4ebf2dc8832ba45e0a45" exitCode=255 Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.696271 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"11e1b7e8eb3aad610d7a3ba6ed5026bd0ea7b777bb1a4ebf2dc8832ba45e0a45"} Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.696501 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.697491 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.697543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.697559 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.698295 4685 scope.go:117] "RemoveContainer" containerID="11e1b7e8eb3aad610d7a3ba6ed5026bd0ea7b777bb1a4ebf2dc8832ba45e0a45" Jan 29 16:26:27 crc kubenswrapper[4685]: W0129 16:26:27.885482 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 29 16:26:27 crc kubenswrapper[4685]: I0129 16:26:27.885594 4685 trace.go:236] Trace[823334738]: "Reflector 
ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Jan-2026 16:26:17.884) (total time: 10001ms): Jan 29 16:26:27 crc kubenswrapper[4685]: Trace[823334738]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (16:26:27.885) Jan 29 16:26:27 crc kubenswrapper[4685]: Trace[823334738]: [10.001387414s] [10.001387414s] END Jan 29 16:26:27 crc kubenswrapper[4685]: E0129 16:26:27.885621 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.484849 4685 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.484950 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.512793 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.512864 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.518865 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.518956 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.556862 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 20:47:55.003582375 +0000 UTC Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.700325 
4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.701750 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b"} Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.701905 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.702630 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.702670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:28 crc kubenswrapper[4685]: I0129 16:26:28.702679 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:29 crc kubenswrapper[4685]: I0129 16:26:29.556990 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 21:10:47.82918076 +0000 UTC Jan 29 16:26:30 crc kubenswrapper[4685]: I0129 16:26:30.557964 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 10:04:06.125218794 +0000 UTC Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.288133 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.288332 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.288444 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.289763 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.289791 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.289839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.293129 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.558470 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 09:48:36.208335396 +0000 UTC Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.712810 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.713862 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.713902 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:31 crc kubenswrapper[4685]: I0129 16:26:31.713924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:32 crc kubenswrapper[4685]: I0129 16:26:32.558954 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 09:07:57.785483392 +0000 UTC Jan 29 16:26:32 crc kubenswrapper[4685]: I0129 16:26:32.713795 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:32 crc kubenswrapper[4685]: I0129 16:26:32.714644 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:32 crc kubenswrapper[4685]: I0129 16:26:32.714691 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:32 crc kubenswrapper[4685]: I0129 16:26:32.714712 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.515875 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.519100 4685 trace.go:236] Trace[290770918]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Jan-2026 16:26:21.786) (total time: 11732ms): Jan 29 16:26:33 crc kubenswrapper[4685]: Trace[290770918]: ---"Objects listed" error: 11732ms (16:26:33.519) Jan 29 16:26:33 crc kubenswrapper[4685]: Trace[290770918]: [11.732935773s] [11.732935773s] END Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.519133 4685 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.520962 4685 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.522735 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.522839 4685 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.522883 4685 trace.go:236] Trace[1179440505]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Jan-2026 16:26:20.148) (total time: 13373ms): Jan 29 16:26:33 crc kubenswrapper[4685]: Trace[1179440505]: ---"Objects listed" error: 13373ms (16:26:33.522) Jan 29 16:26:33 crc kubenswrapper[4685]: Trace[1179440505]: [13.373858852s] [13.373858852s] END Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.522902 4685 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.523367 4685 trace.go:236] Trace[1095452888]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Jan-2026 16:26:22.365) (total time: 11157ms): Jan 29 16:26:33 crc kubenswrapper[4685]: 
Trace[1095452888]: ---"Objects listed" error: 11157ms (16:26:33.523) Jan 29 16:26:33 crc kubenswrapper[4685]: Trace[1095452888]: [11.157435515s] [11.157435515s] END Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.523404 4685 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.533014 4685 apiserver.go:52] "Watching apiserver" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.536196 4685 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.536452 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.536771 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.536818 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.536869 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.536887 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.538222 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.538298 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.538334 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.538343 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.538424 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.538761 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.539039 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.539420 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.539775 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.541169 4685 csr.go:261] certificate signing request csr-plcv4 is approved, waiting to be issued Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.542130 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.545200 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.545422 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.546053 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.546576 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.547910 4685 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.556527 4685 csr.go:257] certificate signing request csr-plcv4 is issued Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.560022 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 03:44:20.141575129 +0000 UTC Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.565630 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.580748 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.589087 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.596830 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.605361 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.616553 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621243 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621288 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621315 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621341 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621363 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621413 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621445 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: 
I0129 16:26:33.621469 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621495 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621518 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621542 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621564 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621588 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621610 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621634 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621657 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621682 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621706 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621760 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621780 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621800 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621820 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621839 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621897 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621918 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621939 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621961 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" 
(UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621983 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622004 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622025 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621609 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622381 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621618 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621859 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622462 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.621847 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622087 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622486 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622100 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622245 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622327 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622519 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622340 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622364 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622559 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622650 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622699 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622733 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622743 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622773 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622792 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622891 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622981 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.622044 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623061 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623086 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623117 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623109 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623209 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623209 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623258 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623279 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623317 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623332 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623335 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623349 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623360 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623368 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623432 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623454 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623474 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623491 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623509 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623526 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623589 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623708 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623711 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623816 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623843 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623862 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623910 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623928 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623947 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.623879 4685 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624008 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624008 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624028 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624044 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624100 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624118 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628197 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628224 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628242 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628262 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628286 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628302 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628317 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628332 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628347 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628363 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628380 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628420 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628436 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628451 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628466 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628482 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628498 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628517 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628535 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628552 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628566 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628581 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628601 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628573 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628620 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628636 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628651 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " 
Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628667 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628685 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628702 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628719 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628735 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628751 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628765 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628787 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628802 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628817 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: 
\"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628832 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628862 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628883 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628901 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628917 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628932 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628947 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628964 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628980 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628996 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629011 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629026 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629041 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629058 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629073 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629089 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629120 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629140 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629155 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624096 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: 
"auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629171 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624101 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629187 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624139 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624152 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629203 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629226 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629244 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629261 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629278 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629294 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629310 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629325 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629341 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629357 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: 
\"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629372 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629387 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629416 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629432 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629449 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629467 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629482 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629498 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629514 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629534 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629549 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629565 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629583 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629599 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629617 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629634 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629654 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629675 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629696 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629720 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630483 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630500 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630517 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630534 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630551 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630573 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630590 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630611 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630627 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630642 4685 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630687 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630707 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630724 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630740 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630755 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630771 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630786 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630803 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630823 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 
29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630840 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630857 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630873 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630891 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630908 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630925 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630988 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631023 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631043 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631062 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631078 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631094 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631110 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631127 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631143 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631161 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631179 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631195 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631210 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631227 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" 
(UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631243 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631259 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631275 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631291 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631308 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631327 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631344 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631360 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.631376 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632371 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" 
(UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632419 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632439 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632502 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632524 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632546 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632564 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632584 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632605 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632625 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632648 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632668 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632686 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632704 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632722 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632740 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632759 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632804 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632816 
4685 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632826 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632836 4685 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632846 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632856 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632866 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632876 4685 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632886 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632896 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632908 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632917 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632928 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632942 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath 
\"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632952 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632967 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632976 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632990 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633000 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633010 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633020 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633030 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633039 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633048 4685 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633062 4685 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633071 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633081 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc 
kubenswrapper[4685]: I0129 16:26:33.633092 4685 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633102 4685 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633113 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633124 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633135 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633144 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633154 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633163 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.633172 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.652367 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.654964 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.655381 4685 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.656018 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-qlnx7"] Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624270 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624293 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624260 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.624332 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628299 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.628892 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629153 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629208 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629273 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629372 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629476 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629564 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629788 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629821 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629840 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629853 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.629923 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630000 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630054 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630143 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.657073 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630262 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.657367 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.657435 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.630308 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632665 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.657503 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632741 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.632899 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.638917 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.640590 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.640701 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.640793 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.640834 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.640910 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641091 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641138 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641147 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641162 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641179 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641165 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641197 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641353 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641363 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641527 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641837 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641851 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.641891 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.657764 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.642061 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.642263 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.642563 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.642583 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.642799 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.642920 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.643269 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.643515 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.643778 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.643922 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644041 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644181 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644294 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644312 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644353 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644487 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644901 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644901 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.644946 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.645006 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.645121 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.645218 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.645558 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.645788 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.645841 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.646010 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.646111 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.646617 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.646627 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.658040 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.646822 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.646954 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.646962 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.647101 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.647289 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.647910 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.648042 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.648258 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.648462 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.648903 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.649114 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.649242 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.649255 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.649276 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.649509 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.649847 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.649891 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.650536 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.650907 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.650957 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.651833 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.652073 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.652277 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.652319 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.652509 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.652884 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.653100 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.653272 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.653701 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.653751 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.653781 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.653650 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.653897 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.654006 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.654457 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.654578 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.654731 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.655125 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.655140 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.655474 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.655858 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.655992 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.656116 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.656182 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.656521 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.656807 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.657442 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.658092 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.658527 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.659035 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:34.159011522 +0000 UTC m=+21.356367784 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.659043 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.659138 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:34.159115084 +0000 UTC m=+21.356471356 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.659337 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.659609 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.659689 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.660032 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.660089 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.660437 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.660498 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.661012 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.661661 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:26:34.161645068 +0000 UTC m=+21.359001340 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.656623 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.661911 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.662095 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.662341 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.662540 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.663017 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.663239 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.663467 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.664895 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.666578 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.666827 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.670264 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.672076 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.675268 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.676584 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.676756 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.677013 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.677526 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.677698 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.678506 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.678537 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.678555 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.678628 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:34.178606078 +0000 UTC m=+21.375962400 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.678860 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.679380 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.680112 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.680163 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.681193 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.681222 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.681237 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.681627 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.682094 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.682252 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.683274 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.685148 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: E0129 16:26:33.685481 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:34.18545913 +0000 UTC m=+21.382815392 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.686229 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.686478 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.698941 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.700159 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.708978 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.711244 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.717677 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.719980 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.727372 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735351 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735485 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735559 4685 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" 
DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735576 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735587 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735599 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735611 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735625 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735637 4685 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735641 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735649 4685 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735687 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735699 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735711 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735722 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735733 4685 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735745 4685 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735756 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735767 4685 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735778 4685 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735789 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735800 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735811 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735822 4685 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735834 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735845 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735859 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735869 4685 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735881 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: 
\"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735893 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735903 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735914 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735949 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735977 4685 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735988 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735997 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736011 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736022 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736034 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736045 4685 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736055 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735610 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736067 4685 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736169 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736186 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736200 4685 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736237 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736251 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736266 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736279 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736291 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736304 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736342 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736358 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736371 4685 reconciler_common.go:293] "Volume 
detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736384 4685 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736411 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736424 4685 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736436 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736449 4685 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736487 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736500 4685 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736513 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736525 4685 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736537 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736549 4685 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736562 4685 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736574 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736586 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736597 4685 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736609 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736622 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736635 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736648 4685 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736658 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736669 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736682 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736694 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736706 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736719 4685 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736730 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736741 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736754 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736765 4685 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736804 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736817 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736827 4685 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736842 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.735946 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736854 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736897 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736913 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736926 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736938 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736979 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.736993 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737004 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737015 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737053 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737067 4685 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737079 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737091 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737102 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737142 4685 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737154 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737169 4685 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737180 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737219 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737234 4685 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737258 4685 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737269 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737309 4685 reconciler_common.go:293] "Volume 
detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737321 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737332 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737344 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737356 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737407 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737421 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737432 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737469 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737482 4685 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737494 4685 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737509 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737522 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737557 4685 reconciler_common.go:293] "Volume 
detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737569 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737580 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737591 4685 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737602 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737612 4685 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737625 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737637 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737649 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737660 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737674 4685 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737687 4685 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737700 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737714 4685 reconciler_common.go:293] "Volume detached for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737726 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737739 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737751 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737764 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737776 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737787 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737802 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737814 4685 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737826 4685 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737839 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737852 4685 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737865 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 
16:26:33.737879 4685 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737901 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737912 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737924 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737937 4685 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737957 4685 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737969 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737980 4685 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.737992 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.738006 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.738017 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.738029 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.738041 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.747224 
4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.757797 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.765089 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.772957 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.782546 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.791170 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.799463 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.806947 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.812886 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.822818 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.833883 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.838834 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d2ab808a-6378-45c6-98a8-f48acf2d9235-hosts-file\") pod \"node-resolver-qlnx7\" (UID: \"d2ab808a-6378-45c6-98a8-f48acf2d9235\") " pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.839006 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjd4g\" (UniqueName: \"kubernetes.io/projected/d2ab808a-6378-45c6-98a8-f48acf2d9235-kube-api-access-rjd4g\") pod \"node-resolver-qlnx7\" (UID: \"d2ab808a-6378-45c6-98a8-f48acf2d9235\") " pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.867115 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.867201 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.867702 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.941582 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjd4g\" (UniqueName: \"kubernetes.io/projected/d2ab808a-6378-45c6-98a8-f48acf2d9235-kube-api-access-rjd4g\") pod \"node-resolver-qlnx7\" (UID: \"d2ab808a-6378-45c6-98a8-f48acf2d9235\") " pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.941952 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d2ab808a-6378-45c6-98a8-f48acf2d9235-hosts-file\") pod \"node-resolver-qlnx7\" (UID: \"d2ab808a-6378-45c6-98a8-f48acf2d9235\") " pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.942025 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d2ab808a-6378-45c6-98a8-f48acf2d9235-hosts-file\") pod \"node-resolver-qlnx7\" (UID: \"d2ab808a-6378-45c6-98a8-f48acf2d9235\") " pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:33 crc kubenswrapper[4685]: I0129 16:26:33.957772 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjd4g\" (UniqueName: \"kubernetes.io/projected/d2ab808a-6378-45c6-98a8-f48acf2d9235-kube-api-access-rjd4g\") pod \"node-resolver-qlnx7\" (UID: \"d2ab808a-6378-45c6-98a8-f48acf2d9235\") " pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.004420 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pjcfc"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.005069 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qlnx7" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.005205 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.005614 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-r99mt"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.005912 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-8dlmj"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.005974 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.006122 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-kbcwf"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.006309 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.006375 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-pxv5d"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.006691 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.007459 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.008873 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.009119 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.009481 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.011554 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.011600 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.011554 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.011707 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.011714 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.011871 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.012182 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.012369 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.012597 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.013769 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.013950 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.014170 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.014209 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.014173 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.014301 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.014339 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 
29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.014361 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.015159 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.015352 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.015533 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.024563 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.030932 4685 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.035662 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.049141 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.064789 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.087729 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.103558 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.119213 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\
\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b1
7b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.128991 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.137911 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.144902 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146594 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-ovn-kubernetes\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146627 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-os-release\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146643 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146660 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d75c6b72-980a-4110-b259-c4b54f0ea9b2-cni-binary-copy\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146675 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-k8s-cni-cncf-io\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146688 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-conf-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146707 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-kubelet\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146722 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-netns\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146736 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6ceb996f-40db-4a5d-915d-dd0f6d926751-rootfs\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 
16:26:34.146752 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-system-cni-dir\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146769 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-ovn\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146787 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dcxr\" (UniqueName: \"kubernetes.io/projected/0319ef8a-068b-4985-a398-8648207e77a1-kube-api-access-7dcxr\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146802 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6ceb996f-40db-4a5d-915d-dd0f6d926751-proxy-tls\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146820 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6ceb996f-40db-4a5d-915d-dd0f6d926751-mcd-auth-proxy-config\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146835 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-kubelet\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146850 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-daemon-config\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146876 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-systemd\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146890 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-script-lib\") pod \"ovnkube-node-pjcfc\" (UID: 
\"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146908 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-netns\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146927 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-slash\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146941 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-node-log\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146954 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-bin\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146966 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-netd\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.146981 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-cnibin\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147002 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-var-lib-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147015 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147030 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-env-overrides\") pod \"ovnkube-node-pjcfc\" (UID: 
\"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147045 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0319ef8a-068b-4985-a398-8648207e77a1-serviceca\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147059 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00a79737-7411-4a3e-8c27-e1e16951a730-ovn-node-metrics-cert\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147074 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jskgn\" (UniqueName: \"kubernetes.io/projected/00a79737-7411-4a3e-8c27-e1e16951a730-kube-api-access-jskgn\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147090 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cnibin\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147107 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-config\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147123 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-cni-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147283 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnwhn\" (UniqueName: \"kubernetes.io/projected/6ceb996f-40db-4a5d-915d-dd0f6d926751-kube-api-access-cnwhn\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.147309 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjffv\" (UniqueName: \"kubernetes.io/projected/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-kube-api-access-vjffv\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.149921 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-tlcvd\" (UniqueName: \"kubernetes.io/projected/d75c6b72-980a-4110-b259-c4b54f0ea9b2-kube-api-access-tlcvd\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.149972 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-systemd-units\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.149992 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150007 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-system-cni-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150021 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-multus-certs\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150038 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0319ef8a-068b-4985-a398-8648207e77a1-host\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150057 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-etc-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150072 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-etc-kubernetes\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150094 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cni-binary-copy\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150109 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-os-release\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150123 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-cni-bin\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150138 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-cni-multus\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150173 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150188 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-socket-dir-parent\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150202 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-log-socket\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.150214 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-hostroot\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.153570 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.163412 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.171131 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.180799 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.194855 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.207199 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.215216 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.223689 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.244483 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250524 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250608 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 
16:26:34.250639 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-systemd\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250656 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-script-lib\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.250694 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:26:35.250665968 +0000 UTC m=+22.448022250 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250742 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-netns\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250757 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-systemd\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250780 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-slash\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.250799 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250837 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-node-log\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250834 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-netns\") pod \"multus-8dlmj\" 
(UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250858 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-slash\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250804 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-node-log\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.250883 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:35.250863863 +0000 UTC m=+22.448220125 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250900 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-bin\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250925 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-netd\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250947 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-cnibin\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250972 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250987 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-cnibin\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250949 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-netd\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251016 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-var-lib-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251036 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-var-lib-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.251040 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.250922 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-bin\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251043 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.251113 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:35.251097609 +0000 UTC m=+22.448453871 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251165 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-env-overrides\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251184 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0319ef8a-068b-4985-a398-8648207e77a1-serviceca\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251202 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00a79737-7411-4a3e-8c27-e1e16951a730-ovn-node-metrics-cert\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251240 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jskgn\" (UniqueName: \"kubernetes.io/projected/00a79737-7411-4a3e-8c27-e1e16951a730-kube-api-access-jskgn\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251257 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cnibin\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251268 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-script-lib\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251274 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-config\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251310 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-cni-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251559 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnwhn\" 
(UniqueName: \"kubernetes.io/projected/6ceb996f-40db-4a5d-915d-dd0f6d926751-kube-api-access-cnwhn\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251586 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjffv\" (UniqueName: \"kubernetes.io/projected/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-kube-api-access-vjffv\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251608 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlcvd\" (UniqueName: \"kubernetes.io/projected/d75c6b72-980a-4110-b259-c4b54f0ea9b2-kube-api-access-tlcvd\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251630 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-systemd-units\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251645 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251660 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-system-cni-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251676 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-multus-certs\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251692 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0319ef8a-068b-4985-a398-8648207e77a1-host\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251709 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-etc-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251725 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-etc-kubernetes\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251744 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cni-binary-copy\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251758 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-os-release\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251772 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-cni-bin\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251785 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-cni-multus\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251806 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251824 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251840 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251858 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-socket-dir-parent\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251903 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" 
(UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-log-socket\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251926 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-hostroot\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251944 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-ovn-kubernetes\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251963 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-os-release\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251984 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252000 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d75c6b72-980a-4110-b259-c4b54f0ea9b2-cni-binary-copy\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252005 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252013 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-k8s-cni-cncf-io\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252026 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cnibin\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252033 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-conf-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252078 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-cni-bin\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252109 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-cni-multus\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252160 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-config\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252171 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252185 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252185 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0319ef8a-068b-4985-a398-8648207e77a1-serviceca\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252197 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.251965 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-env-overrides\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252207 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-os-release\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252237 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-system-cni-dir\") pod \"multus-8dlmj\" (UID: 
\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252247 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-cni-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252316 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252338 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252352 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252432 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-kubelet\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252443 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-systemd-units\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252454 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:35.252441852 +0000 UTC m=+22.449798194 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252464 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-kubelet\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252486 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-netns\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252520 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6ceb996f-40db-4a5d-915d-dd0f6d926751-rootfs\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.252526 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:35.252517504 +0000 UTC m=+22.449873766 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252545 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-system-cni-dir\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252560 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-log-socket\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252571 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-ovn\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252580 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252591 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dcxr\" (UniqueName: \"kubernetes.io/projected/0319ef8a-068b-4985-a398-8648207e77a1-kube-api-access-7dcxr\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252599 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-ovn-kubernetes\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252626 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-hostroot\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252661 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-netns\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252657 4685 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0319ef8a-068b-4985-a398-8648207e77a1-host\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252693 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6ceb996f-40db-4a5d-915d-dd0f6d926751-proxy-tls\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252698 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-multus-certs\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252718 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-os-release\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252724 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6ceb996f-40db-4a5d-915d-dd0f6d926751-mcd-auth-proxy-config\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252739 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-etc-openvswitch\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252747 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-kubelet\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252766 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-etc-kubernetes\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252767 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-daemon-config\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252949 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-conf-dir\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252969 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-run-k8s-cni-cncf-io\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253011 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-system-cni-dir\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.252549 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6ceb996f-40db-4a5d-915d-dd0f6d926751-rootfs\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253363 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253374 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-host-var-lib-kubelet\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253372 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-ovn\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253479 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253517 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-daemon-config\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253594 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-cni-binary-copy\") pod 
\"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253717 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d75c6b72-980a-4110-b259-c4b54f0ea9b2-cni-binary-copy\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.253805 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d75c6b72-980a-4110-b259-c4b54f0ea9b2-multus-socket-dir-parent\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.254192 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6ceb996f-40db-4a5d-915d-dd0f6d926751-mcd-auth-proxy-config\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.256580 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6ceb996f-40db-4a5d-915d-dd0f6d926751-proxy-tls\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.256650 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00a79737-7411-4a3e-8c27-e1e16951a730-ovn-node-metrics-cert\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.292977 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jskgn\" (UniqueName: \"kubernetes.io/projected/00a79737-7411-4a3e-8c27-e1e16951a730-kube-api-access-jskgn\") pod \"ovnkube-node-pjcfc\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.311596 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dcxr\" (UniqueName: \"kubernetes.io/projected/0319ef8a-068b-4985-a398-8648207e77a1-kube-api-access-7dcxr\") pod \"node-ca-r99mt\" (UID: \"0319ef8a-068b-4985-a398-8648207e77a1\") " pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.311957 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.323537 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.326030 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.334955 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjffv\" (UniqueName: \"kubernetes.io/projected/5cf07450-4dbf-44c7-a654-3ea7347ac7ac-kube-api-access-vjffv\") pod \"multus-additional-cni-plugins-pxv5d\" (UID: \"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\") " pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: W0129 16:26:34.337715 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00a79737_7411_4a3e_8c27_e1e16951a730.slice/crio-ae142fc9ca2dc4c04f0d0c1aa5f3ebdedad8db72fd338c7dfb257c0d5019483d WatchSource:0}: Error finding container ae142fc9ca2dc4c04f0d0c1aa5f3ebdedad8db72fd338c7dfb257c0d5019483d: Status 404 returned error can't find the container with id ae142fc9ca2dc4c04f0d0c1aa5f3ebdedad8db72fd338c7dfb257c0d5019483d Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.343261 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-r99mt" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.354140 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlcvd\" (UniqueName: \"kubernetes.io/projected/d75c6b72-980a-4110-b259-c4b54f0ea9b2-kube-api-access-tlcvd\") pod \"multus-8dlmj\" (UID: \"d75c6b72-980a-4110-b259-c4b54f0ea9b2\") " pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.372275 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-rpctp"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.372788 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.372863 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.374653 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.377978 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnwhn\" (UniqueName: \"kubernetes.io/projected/6ceb996f-40db-4a5d-915d-dd0f6d926751-kube-api-access-cnwhn\") pod \"machine-config-daemon-kbcwf\" (UID: \"6ceb996f-40db-4a5d-915d-dd0f6d926751\") " pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.385576 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.445949 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.454683 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqclw\" (UniqueName: \"kubernetes.io/projected/28e2de4c-c48c-4160-87df-b5a8d213a203-kube-api-access-dqclw\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.454783 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.477481 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.483638 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.535057 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.555344 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.555455 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-dqclw\" (UniqueName: \"kubernetes.io/projected/28e2de4c-c48c-4160-87df-b5a8d213a203-kube-api-access-dqclw\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.555524 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.555605 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:35.055586501 +0000 UTC m=+22.252942773 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.558037 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-29 16:21:33 +0000 UTC, rotation deadline is 2026-11-10 02:43:49.339964405 +0000 UTC Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.558075 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6826h17m14.781891992s for next certificate rotation Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.561008 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 15:54:02.415321592 +0000 UTC Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.564244 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.600373 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqclw\" (UniqueName: \"kubernetes.io/projected/28e2de4c-c48c-4160-87df-b5a8d213a203-kube-api-access-dqclw\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.623327 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.652012 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8dlmj" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.664711 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.671978 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8cc
f4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:34 crc kubenswrapper[4685]: W0129 16:26:34.702851 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ceb996f_40db_4a5d_915d_dd0f6d926751.slice/crio-c7cf37501ae8bdb3ec888d2f0c4bc5099f1ca7192cc5b9fb4f66aa738b58965b WatchSource:0}: Error finding container c7cf37501ae8bdb3ec888d2f0c4bc5099f1ca7192cc5b9fb4f66aa738b58965b: Status 404 returned error can't find the container with id c7cf37501ae8bdb3ec888d2f0c4bc5099f1ca7192cc5b9fb4f66aa738b58965b Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.718593 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.719767 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.719890 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.719997 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c49c9b4102a1e789e2cb8fc60d9203b45011f6f190dcee499bb65a91d686a795"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.722286 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"c7cf37501ae8bdb3ec888d2f0c4bc5099f1ca7192cc5b9fb4f66aa738b58965b"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.723653 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805" exitCode=0 Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.723754 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.723832 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" 
event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"ae142fc9ca2dc4c04f0d0c1aa5f3ebdedad8db72fd338c7dfb257c0d5019483d"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.725674 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qlnx7" event={"ID":"d2ab808a-6378-45c6-98a8-f48acf2d9235","Type":"ContainerStarted","Data":"60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.725767 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qlnx7" event={"ID":"d2ab808a-6378-45c6-98a8-f48acf2d9235","Type":"ContainerStarted","Data":"8fed5b4e212c1a36731c2e33e4099a36699f29d5127012a07db21d9f252252ed"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.727763 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerStarted","Data":"cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.727804 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerStarted","Data":"6a65e3e20396fe6f9d9ac422603296a56234815faf6cf70afd42ac3122f9aaaa"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.730031 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.730051 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"352e0d37ddfbb1d47b96447cfe25be59e74ea1e0dd495d8f6bde6adb9e373633"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.731250 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerStarted","Data":"1a31c9a58bae83127e43d96ae38210c69e465ec36f847aecc0ce23ec87bee755"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.733047 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-r99mt" event={"ID":"0319ef8a-068b-4985-a398-8648207e77a1","Type":"ContainerStarted","Data":"3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.733182 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-r99mt" event={"ID":"0319ef8a-068b-4985-a398-8648207e77a1","Type":"ContainerStarted","Data":"d140bcd4d3122222072f45ce0a6b8957b13689671bae493ded2da86c9431fe48"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.736719 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.737628 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 29 16:26:34 crc 
kubenswrapper[4685]: I0129 16:26:34.739567 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b" exitCode=255 Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.739599 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.739676 4685 scope.go:117] "RemoveContainer" containerID="11e1b7e8eb3aad610d7a3ba6ed5026bd0ea7b777bb1a4ebf2dc8832ba45e0a45" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.740597 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bd1dc5fc43324ee3490f4132aac87b2005395b4bc32ec96b3766cddc69c6bdca"} Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.749777 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.820710 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.849873 4685 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.877143 4685 scope.go:117] "RemoveContainer" containerID="4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b" Jan 29 16:26:34 crc kubenswrapper[4685]: E0129 16:26:34.877495 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.877925 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.888514 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.925038 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:34 crc kubenswrapper[4685]: I0129 16:26:34.968432 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.016440 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cr
i-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.047555 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.059959 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.060093 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.060140 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:36.060125704 +0000 UTC m=+23.257481956 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.099272 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.138225 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.180557 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.220113 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.248574 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.261742 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.261959 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.262009 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.262056 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.262096 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262191 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262250 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:37.262227186 +0000 UTC m=+24.459583458 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262328 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:26:37.262316778 +0000 UTC m=+24.459673040 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262467 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262491 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262506 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262543 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:37.262533194 +0000 UTC m=+24.459889466 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262625 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262645 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262657 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262700 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:37.262687258 +0000 UTC m=+24.460043520 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262770 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.262813 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:37.262804911 +0000 UTC m=+24.460161173 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.291657 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z 
is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.326684 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.370620 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.408292 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11e1b7e8eb3aad610d7a3ba6ed5026bd0ea7b777bb1a4ebf2dc8832ba45e0a45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:27Z\\\",\\\"message\\\":\\\"W0129 16:26:16.870188 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0129 16:26:16.870638 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769703976 cert, and key in /tmp/serving-cert-95494591/serving-signer.crt, /tmp/serving-cert-95494591/serving-signer.key\\\\nI0129 16:26:17.269333 1 observer_polling.go:159] Starting file observer\\\\nW0129 16:26:17.272315 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0129 16:26:17.272490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:17.273224 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-95494591/tls.crt::/tmp/serving-cert-95494591/tls.key\\\\\\\"\\\\nF0129 16:26:27.580356 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" 
limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc 
kubenswrapper[4685]: I0129 16:26:35.446453 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.486434 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.489383 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.492716 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.526834 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.544243 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.562162 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 22:12:59.373197458 +0000 UTC Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.585830 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.601738 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.601852 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.601838 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.601965 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.602081 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.602199 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.607287 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.608352 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.609810 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.610711 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.612013 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.612795 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.613625 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 
16:26:35.615306 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.616277 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.617874 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.618820 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.620964 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.621889 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.622617 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.626237 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.626898 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.628476 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.629019 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.629716 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.630999 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.631829 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.633018 4685 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.633502 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\
\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.633867 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.635326 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.635971 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.636901 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.639249 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.639868 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.641122 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.641814 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.642892 4685 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.643009 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.644952 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.646255 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" 
path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.646859 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.648751 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.649683 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.650811 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.651669 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.652894 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.653483 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.654613 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.655415 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.656567 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.658517 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.659781 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.660599 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.662006 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" 
path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.662639 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.663909 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.664489 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.665740 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.666449 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.667023 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.677758 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.704840 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.744561 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.745116 4685 generic.go:334] "Generic (PLEG): container finished" podID="5cf07450-4dbf-44c7-a654-3ea7347ac7ac" containerID="cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff" exitCode=0 Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.745207 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerDied","Data":"cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.749031 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.749080 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.749093 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.749104 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.749113 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.749123 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.750786 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.750865 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.752238 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerStarted","Data":"8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f"} Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.753717 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.755820 4685 scope.go:117] "RemoveContainer" containerID="4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b" Jan 29 16:26:35 crc kubenswrapper[4685]: E0129 16:26:35.755997 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.789894 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.832249 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11e1b7e8eb3aad610d7a3ba6ed5026bd0ea7b777bb1a4ebf2dc8832ba45e0a45\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:27Z\\\",\\\"message\\\":\\\"W0129 16:26:16.870188 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0129 16:26:16.870638 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769703976 cert, and key in /tmp/serving-cert-95494591/serving-signer.crt, /tmp/serving-cert-95494591/serving-signer.key\\\\nI0129 16:26:17.269333 1 observer_polling.go:159] Starting file observer\\\\nW0129 16:26:17.272315 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0129 16:26:17.272490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:17.273224 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-95494591/tls.crt::/tmp/serving-cert-95494591/tls.key\\\\\\\"\\\\nF0129 16:26:27.580356 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" 
limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc 
kubenswrapper[4685]: I0129 16:26:35.868232 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.925058 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.960883 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:35 crc kubenswrapper[4685]: I0129 16:26:35.996190 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:35Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.028517 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.067434 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.072799 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:36 crc kubenswrapper[4685]: E0129 16:26:36.072981 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:36 crc kubenswrapper[4685]: E0129 16:26:36.073095 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:38.073070977 +0000 UTC m=+25.270427249 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.104651 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.151145 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775
c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.186059 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.228807 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.273360 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.307213 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.349981 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z 
is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.387858 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.426614 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.466940 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kub
ernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.506145 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc 
kubenswrapper[4685]: I0129 16:26:36.543729 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.563109 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 15:18:11.903982833 +0000 UTC Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.587550 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.601365 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:36 crc kubenswrapper[4685]: E0129 16:26:36.601568 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.625725 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.668077 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.706337 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.745326 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.760573 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0"} Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.762898 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerStarted","Data":"5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc"} Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.791160 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.835375 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.864904 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.911798 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.947053 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:36 crc kubenswrapper[4685]: I0129 16:26:36.988900 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:36Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.025963 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.066549 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.113791 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.145805 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.183836 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.226349 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 
16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.252072 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.252696 4685 scope.go:117] "RemoveContainer" containerID="4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b" Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.252837 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.265750 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.291381 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.291589 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:26:41.291545339 +0000 UTC m=+28.488901631 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.291661 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.291689 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.291747 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.291778 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.291860 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.291903 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:41.291896408 +0000 UTC m=+28.489252670 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292018 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292037 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292047 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292065 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292090 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:41.292076312 +0000 UTC m=+28.489432574 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292157 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:41.292124113 +0000 UTC m=+28.489480455 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292076 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292184 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292197 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.292232 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:41.292217556 +0000 UTC m=+28.489573818 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.307722 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.367034 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.401232 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.430630 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z 
is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.468454 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.508735 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.563400 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 13:44:06.943998677 +0000 UTC Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.601019 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.601057 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.601075 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.601156 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.601268 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:37 crc kubenswrapper[4685]: E0129 16:26:37.601670 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.769509 4685 generic.go:334] "Generic (PLEG): container finished" podID="5cf07450-4dbf-44c7-a654-3ea7347ac7ac" containerID="5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc" exitCode=0 Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.769633 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerDied","Data":"5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc"} Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.776029 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.795290 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\
\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.808083 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.824465 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.849133 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.871623 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z 
is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.889228 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.907634 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.921299 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.936274 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.947450 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.960489 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:37 crc kubenswrapper[4685]: I0129 16:26:37.985308 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:37Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.064900 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.082294 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.098287 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:38 crc kubenswrapper[4685]: E0129 16:26:38.098412 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:38 crc kubenswrapper[4685]: E0129 16:26:38.098467 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:42.09844395 +0000 UTC m=+29.295800212 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.107319 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.145946 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.563781 4685 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 23:16:08.391617646 +0000 UTC Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.601838 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:38 crc kubenswrapper[4685]: E0129 16:26:38.601977 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.782260 4685 generic.go:334] "Generic (PLEG): container finished" podID="5cf07450-4dbf-44c7-a654-3ea7347ac7ac" containerID="389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211" exitCode=0 Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.782337 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerDied","Data":"389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211"} Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.811370 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.828841 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.839360 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.848673 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.862240 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.881159 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.892272 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.902897 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.913457 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 
16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.927880 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.939187 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.951115 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.964114 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.983339 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z 
is after 2025-08-24T17:21:41Z" Jan 29 16:26:38 crc kubenswrapper[4685]: I0129 16:26:38.995119 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:38Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.012988 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.564928 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 14:44:21.920818633 +0000 UTC Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.600651 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:39 crc kubenswrapper[4685]: E0129 16:26:39.600764 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.600668 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:39 crc kubenswrapper[4685]: E0129 16:26:39.600824 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.600656 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:39 crc kubenswrapper[4685]: E0129 16:26:39.600869 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.789707 4685 generic.go:334] "Generic (PLEG): container finished" podID="5cf07450-4dbf-44c7-a654-3ea7347ac7ac" containerID="787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1" exitCode=0 Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.789786 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerDied","Data":"787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1"} Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.797280 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b"} Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.797678 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.807531 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 
16:26:39.823313 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.829748 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.845140 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready
\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.863773 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.880768 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.895673 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.923257 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.925922 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.925971 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.926004 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.926193 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.926805 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z 
is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.935841 4685 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.936280 4685 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.937919 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.937975 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.937991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.938014 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.938092 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:39Z","lastTransitionTime":"2026-01-29T16:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.943908 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: E0129 16:26:39.959345 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.965217 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.965253 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.965263 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.965280 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.965291 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:39Z","lastTransitionTime":"2026-01-29T16:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.969864 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: E0129 16:26:39.976326 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"e
cca5e7b-6a38-45e2-ba57-5071f788384c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.981505 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.981537 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.981551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.981566 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.981576 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:39Z","lastTransitionTime":"2026-01-29T16:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:39 crc kubenswrapper[4685]: I0129 16:26:39.988780 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:39 crc kubenswrapper[4685]: E0129 16:26:39.996605 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:39Z is after 
2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.002922 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.003021 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.003083 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.003165 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.003260 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.006115 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: E0129 16:26:40.017568 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 
2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.018362 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.023079 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.023105 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.023113 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.023128 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.023138 4685 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.033528 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: E0129 16:26:40.038692 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: E0129 16:26:40.038872 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.041149 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.041181 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.041199 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.041220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.041234 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.047521 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.066342 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.084482 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.113183 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba6
10724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.130473 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.144822 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.145180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.145301 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.145411 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.145508 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.149589 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.165802 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.184139 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.202621 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\"
,\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.224175 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.241089 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.248136 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.248161 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.248174 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.248194 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.248206 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.262299 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.279011 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.292501 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.307426 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.328964 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.346175 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.350534 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.350563 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.350573 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.350591 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.350603 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.362210 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.377002 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.454620 4685 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.454837 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.455167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.455201 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.455218 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.558239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.558287 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.558302 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.558325 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.558343 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.566220 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 05:27:40.631650065 +0000 UTC Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.601646 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:40 crc kubenswrapper[4685]: E0129 16:26:40.601826 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.661086 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.661131 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.661159 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.661180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.661191 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.764807 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.765463 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.766074 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.766192 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.766254 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.802071 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.802203 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.838618 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.856673 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",
\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is 
complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.875720 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.875810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.875830 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.875858 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.875877 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.876536 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.901538 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.919695 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.937323 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.971263 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.978534 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.978570 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.978578 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.978596 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.978607 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:40Z","lastTransitionTime":"2026-01-29T16:26:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:40 crc kubenswrapper[4685]: I0129 16:26:40.991229 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:40Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.007552 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.022534 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.037581 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f
88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.054774 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.070511 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.081259 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.081319 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.081335 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.081363 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.081382 4685 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.088272 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.111270 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174
f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvsw
itch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.128731 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.145817 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.184445 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.184497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.184510 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.184530 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.184543 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.286758 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.286794 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.286803 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.286818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.286828 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.337900 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338127 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:26:49.33809584 +0000 UTC m=+36.535452102 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.338208 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.338249 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.338304 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.338428 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338492 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338540 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338655 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338707 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338561 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338723 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338742 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338762 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338713 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:49.338669674 +0000 UTC m=+36.536026056 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338872 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:49.338815488 +0000 UTC m=+36.536171790 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338926 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:49.33890915 +0000 UTC m=+36.536265442 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.338989 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:49.338965762 +0000 UTC m=+36.536322164 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.390409 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.390471 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.390486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.390515 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.390532 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.493108 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.493159 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.493172 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.493191 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.493203 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.567042 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 15:56:11.603242823 +0000 UTC Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.596904 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.596971 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.596985 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.597008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.597022 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.601298 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.601388 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.601522 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.601602 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.601775 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:41 crc kubenswrapper[4685]: E0129 16:26:41.601956 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.700309 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.700380 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.700447 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.700482 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.700508 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.803265 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.803316 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.803332 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.803355 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.803374 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.816233 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerStarted","Data":"359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.838918 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.857718 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.871893 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\
",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.883960 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.896244 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.911408 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.911675 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.911691 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.911700 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.911714 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.911725 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:41Z","lastTransitionTime":"2026-01-29T16:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.927292 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.940825 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.955875 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.974475 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:41 crc kubenswrapper[4685]: I0129 16:26:41.988564 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.001104 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.013988 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.014026 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.014037 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.014052 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.014062 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.014075 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.038846 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.065328 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.087011 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.116983 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.117028 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.117040 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.117058 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.117071 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.154311 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:42 crc kubenswrapper[4685]: E0129 16:26:42.154515 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:42 crc kubenswrapper[4685]: E0129 16:26:42.154592 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:26:50.154574674 +0000 UTC m=+37.351930946 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.220385 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.220468 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.220481 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.220503 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.220520 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.323602 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.323699 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.323716 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.323739 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.323754 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.427040 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.427085 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.427111 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.427134 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.427148 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.529100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.529145 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.529160 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.529179 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.529195 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.567293 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 16:47:29.636102443 +0000 UTC Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.601737 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:42 crc kubenswrapper[4685]: E0129 16:26:42.601936 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.632090 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.632134 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.632147 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.632165 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.632177 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.735364 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.735436 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.735450 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.735467 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.735480 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.826008 4685 generic.go:334] "Generic (PLEG): container finished" podID="5cf07450-4dbf-44c7-a654-3ea7347ac7ac" containerID="359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597" exitCode=0 Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.826090 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerDied","Data":"359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.838523 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.838589 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.838608 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.838635 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.838654 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.852021 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.874233 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.893987 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.906164 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.921803 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.934604 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.940443 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.940464 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.940472 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.940485 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.940493 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:42Z","lastTransitionTime":"2026-01-29T16:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.951299 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.966497 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.978670 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:42 crc kubenswrapper[4685]: I0129 16:26:42.992906 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:42Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.009770 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.019336 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.033134 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.043682 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.043744 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.043762 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.043785 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.043802 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.045595 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.058750 4685 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.070955 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.145983 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.146029 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.146038 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.146052 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.146061 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.248330 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.248381 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.248417 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.248440 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.248459 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.350156 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.350198 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.350209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.350228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.350240 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.359756 4685 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.452638 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.452682 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.452695 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.452714 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.452726 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.554887 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.555241 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.555384 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.555605 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.555861 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.568317 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 13:17:38.078554961 +0000 UTC Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.601023 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:43 crc kubenswrapper[4685]: E0129 16:26:43.601139 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.601024 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.601024 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:43 crc kubenswrapper[4685]: E0129 16:26:43.601233 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:43 crc kubenswrapper[4685]: E0129 16:26:43.601378 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.633903 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:
//b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bf
d99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.646302 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.658943 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.658997 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.659014 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.659041 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.659061 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.663979 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.676831 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.699063 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.715929 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.728820 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.740328 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.755473 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.761134 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.761158 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.761167 4685 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.761186 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.761203 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.773119 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.786628 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.797443 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.809572 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.822296 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.832444 4685 generic.go:334] "Generic (PLEG): container finished" podID="5cf07450-4dbf-44c7-a654-3ea7347ac7ac" containerID="0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af" exitCode=0 Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.832523 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerDied","Data":"0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.834181 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.844704 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.863977 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.864586 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.864614 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.864625 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.864660 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.864673 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.877136 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.888668 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.897897 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.908269 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f
88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.921332 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.932139 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.943335 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.959933 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.967022 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.967052 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.967063 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.967076 4685 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.967085 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:43Z","lastTransitionTime":"2026-01-29T16:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.973956 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mount
Path\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:43 crc kubenswrapper[4685]: I0129 16:26:43.993056 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.007053 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.020381 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.039512 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.048549 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.057815 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.069254 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.069293 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.069302 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.069316 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.069325 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.171158 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.171195 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.171206 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.171221 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.171232 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.273415 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.273464 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.273478 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.273495 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.273505 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.380357 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.380423 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.380438 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.380454 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.380464 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.483116 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.483167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.483183 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.483207 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.483224 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.569038 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 01:33:53.226376517 +0000 UTC Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.586764 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.586824 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.586840 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.586864 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.586882 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.601181 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:44 crc kubenswrapper[4685]: E0129 16:26:44.601333 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.690094 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.690140 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.690154 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.690172 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.690184 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.792070 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.792125 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.792138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.792155 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.792755 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.840016 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" event={"ID":"5cf07450-4dbf-44c7-a654-3ea7347ac7ac","Type":"ContainerStarted","Data":"9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.863043 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba6
10724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.880829 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.896497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.896575 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.896589 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.896621 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.896635 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.900053 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.922091 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.939017 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.952979 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\"
,\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.967547 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.978650 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.993627 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:44Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.999441 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.999482 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.999506 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.999527 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:44 crc kubenswrapper[4685]: I0129 16:26:44.999538 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:44Z","lastTransitionTime":"2026-01-29T16:26:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.009785 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.020424 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.031319 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.049211 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.064667 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.082168 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.097818 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 
16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.102279 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.102319 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.102332 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.102353 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.102366 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.204915 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.204961 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.204976 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.204993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.205008 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.308290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.308361 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.308372 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.308411 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.308424 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.411477 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.411545 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.411563 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.411588 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.411605 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.515424 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.515679 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.515697 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.515724 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.515743 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.569351 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 19:10:05.384426832 +0000 UTC Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.600859 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.600939 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.600881 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:45 crc kubenswrapper[4685]: E0129 16:26:45.601025 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:45 crc kubenswrapper[4685]: E0129 16:26:45.601127 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:45 crc kubenswrapper[4685]: E0129 16:26:45.601239 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.618557 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.618617 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.618636 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.618660 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.618678 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.722035 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.722101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.722113 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.722138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.722150 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.825844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.825902 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.825916 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.825940 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.825954 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.848821 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/0.log" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.854046 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b" exitCode=1 Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.854097 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.855872 4685 scope.go:117] "RemoveContainer" containerID="56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.879194 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.899316 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.915164 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.928756 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.928827 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.928840 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.928894 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.928910 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:45Z","lastTransitionTime":"2026-01-29T16:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.929857 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.946862 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.981939 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:45 crc kubenswrapper[4685]: I0129 16:26:45.995895 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:45Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.013831 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.026421 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 
16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.031298 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.031337 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.031346 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.031365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.031374 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.044505 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.061189 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.072762 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.085169 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.113732 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba6
10724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:45Z\\\",\\\"message\\\":\\\"inpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.003920 5907 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0129 16:26:45.004013 5907 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004161 5907 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004282 5907 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004683 5907 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004759 5907 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0129 16:26:45.005086 5907 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.133841 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mou
ntPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.134864 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.134937 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.134958 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.135735 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.135802 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.159378 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:46Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.239408 4685 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.239459 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.239473 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.239497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.239512 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.342627 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.343365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.343786 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.343982 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.344170 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.447484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.447568 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.447597 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.447627 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.447649 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.550651 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.550991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.551087 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.551175 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.551253 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.570510 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 13:04:04.505404535 +0000 UTC Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.601105 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:46 crc kubenswrapper[4685]: E0129 16:26:46.601343 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.654164 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.654226 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.654240 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.654265 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.654279 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.756938 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.757458 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.757469 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.757488 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.757503 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.859998 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.860079 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.860093 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.860114 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.860127 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.962356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.962491 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.962505 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.962528 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:46 crc kubenswrapper[4685]: I0129 16:26:46.962541 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:46Z","lastTransitionTime":"2026-01-29T16:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.064853 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.064895 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.064905 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.064921 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.064930 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.167680 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.167726 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.167738 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.167757 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.167770 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.271365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.271464 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.271478 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.271844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.271890 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.369926 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5"] Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.371878 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.373596 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.373725 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.374459 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.374495 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.374509 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.374522 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.374532 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.387530 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.409028 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.417425 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82rbs\" (UniqueName: \"kubernetes.io/projected/65c9dddd-ada6-4134-af1e-d725cc23c354-kube-api-access-82rbs\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.417490 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65c9dddd-ada6-4134-af1e-d725cc23c354-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.417542 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65c9dddd-ada6-4134-af1e-d725cc23c354-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.417567 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65c9dddd-ada6-4134-af1e-d725cc23c354-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.422016 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.433628 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.470169 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.476309 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.476355 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.476374 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.476418 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.476434 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.486221 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.504318 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.516409 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.517953 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82rbs\" (UniqueName: \"kubernetes.io/projected/65c9dddd-ada6-4134-af1e-d725cc23c354-kube-api-access-82rbs\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.517996 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65c9dddd-ada6-4134-af1e-d725cc23c354-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.518029 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65c9dddd-ada6-4134-af1e-d725cc23c354-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.518045 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65c9dddd-ada6-4134-af1e-d725cc23c354-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.519296 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65c9dddd-ada6-4134-af1e-d725cc23c354-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.519492 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65c9dddd-ada6-4134-af1e-d725cc23c354-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.524771 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65c9dddd-ada6-4134-af1e-d725cc23c354-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: \"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.530056 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.539465 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82rbs\" (UniqueName: \"kubernetes.io/projected/65c9dddd-ada6-4134-af1e-d725cc23c354-kube-api-access-82rbs\") pod \"ovnkube-control-plane-749d76644c-xgsh5\" (UID: 
\"65c9dddd-ada6-4134-af1e-d725cc23c354\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.539570 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.557269 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.571152 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 06:05:23.390104538 +0000 UTC Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.571890 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.578749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.578782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.578794 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.578807 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.578818 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.587744 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.601729 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.601745 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.601727 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:47 crc kubenswrapper[4685]: E0129 16:26:47.601845 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:47 crc kubenswrapper[4685]: E0129 16:26:47.601977 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:47 crc kubenswrapper[4685]: E0129 16:26:47.602085 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.610758 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.631756 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba6
10724d19560d71637ad60f9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:45Z\\\",\\\"message\\\":\\\"inpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.003920 5907 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0129 16:26:45.004013 5907 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004161 5907 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004282 5907 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004683 5907 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004759 5907 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0129 16:26:45.005086 5907 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.644193 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc3582
5771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.657448 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:47Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.681191 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.681235 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.681251 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.681271 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.681282 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.695669 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" Jan 29 16:26:47 crc kubenswrapper[4685]: W0129 16:26:47.710806 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65c9dddd_ada6_4134_af1e_d725cc23c354.slice/crio-23706a6c5fd2f9426a9bf32fba577dadce52f37a1eac8e80857550c8d8ed57dc WatchSource:0}: Error finding container 23706a6c5fd2f9426a9bf32fba577dadce52f37a1eac8e80857550c8d8ed57dc: Status 404 returned error can't find the container with id 23706a6c5fd2f9426a9bf32fba577dadce52f37a1eac8e80857550c8d8ed57dc Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.784412 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.784475 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.784497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.784526 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.784540 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.862990 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/0.log" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.865779 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.866606 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" event={"ID":"65c9dddd-ada6-4134-af1e-d725cc23c354","Type":"ContainerStarted","Data":"23706a6c5fd2f9426a9bf32fba577dadce52f37a1eac8e80857550c8d8ed57dc"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.892180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.892428 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.892514 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.892666 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.892742 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.998134 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.998217 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.998240 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.998274 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:47 crc kubenswrapper[4685]: I0129 16:26:47.998305 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:47Z","lastTransitionTime":"2026-01-29T16:26:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.102949 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.103020 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.103036 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.103057 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.103074 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.205803 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.205863 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.205879 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.205903 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.205919 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.308836 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.308877 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.308889 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.308906 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.308918 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.412543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.412623 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.412639 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.412674 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.412693 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.516443 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.516552 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.516647 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.516694 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.516719 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.571784 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 16:19:36.731747132 +0000 UTC Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.601636 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:48 crc kubenswrapper[4685]: E0129 16:26:48.601822 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.618759 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.618833 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.618859 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.618888 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.618924 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.720669 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.720697 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.720705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.720720 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.720754 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.829619 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.829667 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.829678 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.829695 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.829706 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.870723 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" event={"ID":"65c9dddd-ada6-4134-af1e-d725cc23c354","Type":"ContainerStarted","Data":"d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.873558 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/1.log" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.874240 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/0.log" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.877920 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545" exitCode=1 Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.877967 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.878030 4685 scope.go:117] "RemoveContainer" containerID="56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.878931 4685 scope.go:117] "RemoveContainer" containerID="ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545" Jan 29 16:26:48 crc kubenswrapper[4685]: E0129 16:26:48.879122 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.917802 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.930564 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.931577 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.931603 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.931615 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.931629 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.931641 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:48Z","lastTransitionTime":"2026-01-29T16:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.943517 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.963114 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.973224 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:48 crc kubenswrapper[4685]: I0129 16:26:48.982885 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.002782 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.016039 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.032282 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.033964 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 
crc kubenswrapper[4685]: I0129 16:26:49.033993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.034006 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.034025 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.034037 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.048446 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"na
me\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.065486 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\
\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.084103 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.102344 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.121003 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.136473 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.136500 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.136509 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.136525 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.136533 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.153427 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56add7a9857acb2c3ac40ddece912c6e20f7aba610724d19560d71637ad60f9b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:45Z\\\",\\\"message\\\":\\\"inpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.003920 5907 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0129 16:26:45.004013 5907 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004161 5907 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004282 5907 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004683 5907 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0129 16:26:45.004759 5907 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0129 16:26:45.005086 5907 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.177618 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/h
ost/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.199742 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.239155 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.239201 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc 
kubenswrapper[4685]: I0129 16:26:49.239213 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.239247 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.239259 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.342336 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.342374 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.342384 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.342416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.342426 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.437870 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438021 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:27:05.437996045 +0000 UTC m=+52.635352317 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.438077 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.438120 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.438189 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.438225 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438336 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438414 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438443 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438447 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438463 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:49 crc 
kubenswrapper[4685]: E0129 16:26:49.438485 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438342 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438493 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:05.438464466 +0000 UTC m=+52.635820768 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438574 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:05.438560499 +0000 UTC m=+52.635916801 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438593 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:05.438583249 +0000 UTC m=+52.635939541 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438514 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.438668 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:05.438651631 +0000 UTC m=+52.636008023 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.444899 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.444946 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.444959 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.444981 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.444996 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.548347 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.548425 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.548447 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.548472 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.548485 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.572035 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 22:38:33.654322747 +0000 UTC Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.601750 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.601768 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.601842 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.602331 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.602498 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.602574 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.602929 4685 scope.go:117] "RemoveContainer" containerID="4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.653087 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.653821 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.653843 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.653871 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.653889 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.757071 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.757106 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.757117 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.757132 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.757142 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.859801 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.859858 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.859873 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.859895 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.859910 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.883126 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/1.log" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.888086 4685 scope.go:117] "RemoveContainer" containerID="ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545" Jan 29 16:26:49 crc kubenswrapper[4685]: E0129 16:26:49.888330 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.888806 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" event={"ID":"65c9dddd-ada6-4134-af1e-d725cc23c354","Type":"ContainerStarted","Data":"a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.900549 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 
crc kubenswrapper[4685]: I0129 16:26:49.915047 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.949170 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\
\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf
0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.962769 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.962815 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.962827 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.962845 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.962859 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:49Z","lastTransitionTime":"2026-01-29T16:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:49 crc kubenswrapper[4685]: I0129 16:26:49.972044 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.001469 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:49Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.014383 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.040536 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc
0b757d8c860f533a41c3f545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.053424 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.065146 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.065171 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.065180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.065193 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.065202 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.067491 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.081028 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.096316 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.108000 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.119301 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.128754 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.137971 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.138009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.138022 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.138039 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.138052 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.140313 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.153423 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.160353 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.160526 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.160558 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.160573 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.160590 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.160602 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.174602 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.174746 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.178891 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.178920 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.178932 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.179191 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.179206 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.189990 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.193328 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.197449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.197499 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.197512 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.197530 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.197542 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.212648 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.214025 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.216054 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.216075 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.216084 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.216097 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.216106 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.230276 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.230245 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.230427 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.232077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.232146 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.232161 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.232186 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.232202 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.246525 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.247822 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.248007 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.248100 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:06.248077536 +0000 UTC m=+53.445433798 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.263169 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.300720 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.317142 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.335341 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.335573 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.335646 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.335664 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.335691 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.335706 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.353053 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.371226 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.386374 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.402785 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.416243 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.429558 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 
16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.453151 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.453201 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.453245 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.453287 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.453299 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.466788 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.482803 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.497169 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.557016 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.557070 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.557085 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.557105 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.557116 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.572557 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 11:10:35.770536372 +0000 UTC Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.601048 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:50 crc kubenswrapper[4685]: E0129 16:26:50.601276 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.660618 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.660691 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.660708 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.660737 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.660755 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.765484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.765555 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.765568 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.765592 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.765607 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.868811 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.868895 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.868910 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.868936 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.868956 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.896552 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.899368 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.923901 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.952974 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.973037 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.973110 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.973130 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.973160 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.973183 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:50Z","lastTransitionTime":"2026-01-29T16:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.976422 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:50 crc kubenswrapper[4685]: I0129 16:26:50.993973 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:50Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.015884 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc
0b757d8c860f533a41c3f545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.037370 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.054956 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.075366 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.075469 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc 
kubenswrapper[4685]: I0129 16:26:51.075496 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.075527 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.075548 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.080289 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.102294 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.121936 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.139151 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.156098 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.172695 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 
16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.178994 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.179041 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.179055 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.179096 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.179111 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.202215 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.220929 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.239834 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.257732 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:51Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.281980 4685 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.282031 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.282044 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.282063 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.282075 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.385155 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.385222 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.385238 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.385264 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.385284 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.517862 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.517907 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.517918 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.517940 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.517951 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.573748 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 13:11:07.838999112 +0000 UTC Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.601245 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.601419 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.601435 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:51 crc kubenswrapper[4685]: E0129 16:26:51.601620 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:51 crc kubenswrapper[4685]: E0129 16:26:51.601729 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:51 crc kubenswrapper[4685]: E0129 16:26:51.601791 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.620696 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.620736 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.620750 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.620770 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.620784 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.724202 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.724295 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.724323 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.724357 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.724381 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.828381 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.828480 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.828491 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.828511 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.828522 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.932197 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.932275 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.932289 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.932317 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:51 crc kubenswrapper[4685]: I0129 16:26:51.932333 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:51Z","lastTransitionTime":"2026-01-29T16:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.036064 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.036119 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.036133 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.036152 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.036164 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.139966 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.140069 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.140101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.140142 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.140170 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.243948 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.244010 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.244025 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.244048 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.244061 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.347235 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.347305 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.347324 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.347350 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.347371 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.451851 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.451924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.451943 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.451970 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.451987 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.555859 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.555932 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.555945 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.555969 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.555983 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.574918 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 04:18:29.846154077 +0000 UTC Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.601216 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:52 crc kubenswrapper[4685]: E0129 16:26:52.601384 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.659061 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.659123 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.659141 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.659170 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.659189 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.761910 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.761996 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.762010 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.762035 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.762050 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.865254 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.865352 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.865365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.865388 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.865422 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.968233 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.968284 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.968300 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.968318 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:52 crc kubenswrapper[4685]: I0129 16:26:52.968329 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:52Z","lastTransitionTime":"2026-01-29T16:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.056695 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.070267 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.070303 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.070312 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.070326 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.070337 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.173387 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.173467 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.173480 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.173501 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.173514 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.276900 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.276947 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.276963 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.276982 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.276996 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.383244 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.383274 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.383284 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.383297 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.383305 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.486268 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.486313 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.486328 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.486348 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.486360 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.576147 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 02:25:04.261694203 +0000 UTC Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.589188 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.589232 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.589247 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.589266 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.589279 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.600888 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.601005 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:53 crc kubenswrapper[4685]: E0129 16:26:53.601068 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.601112 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:53 crc kubenswrapper[4685]: E0129 16:26:53.601384 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:53 crc kubenswrapper[4685]: E0129 16:26:53.601510 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.615934 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.644969 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.659865 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.676713 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.692218 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.692264 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.692283 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.692305 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.692321 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.694922 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.709802 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.725232 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.738549 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.751275 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.767013 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 
16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.788117 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.794311 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.794349 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.794361 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.794376 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.794386 4685 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.809134 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.818626 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.829096 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.856088 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.869857 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.883811 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:26:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.897105 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 
crc kubenswrapper[4685]: I0129 16:26:53.897131 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.897141 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.897156 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.897166 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.999902 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.999951 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:53 crc kubenswrapper[4685]: I0129 16:26:53.999968 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:53.999992 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.000012 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:53Z","lastTransitionTime":"2026-01-29T16:26:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.108534 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.108576 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.108589 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.108609 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.108622 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.211266 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.211317 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.211335 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.211358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.211376 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.314645 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.314678 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.314689 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.314705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.314719 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.417323 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.417365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.417375 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.417410 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.417421 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.519761 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.519844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.519864 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.519890 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.519908 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.576768 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 02:44:34.162907468 +0000 UTC Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.601181 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:54 crc kubenswrapper[4685]: E0129 16:26:54.601353 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.623599 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.623668 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.623687 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.623716 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.623739 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.726959 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.727032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.727049 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.727074 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.727089 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.829890 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.829957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.830002 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.830026 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.830045 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.932726 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.932757 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.932767 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.932780 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:54 crc kubenswrapper[4685]: I0129 16:26:54.932792 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:54Z","lastTransitionTime":"2026-01-29T16:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.035050 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.035094 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.035109 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.035127 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.035138 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.137543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.137607 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.137625 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.137655 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.137673 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.240827 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.240936 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.240959 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.240984 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.241001 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.344606 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.344665 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.344678 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.344700 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.344718 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.448176 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.448230 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.448243 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.448264 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.448276 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.551896 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.551957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.551978 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.552007 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.552028 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.577460 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 21:22:44.685495131 +0000 UTC Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.600960 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:55 crc kubenswrapper[4685]: E0129 16:26:55.601224 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.601807 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:55 crc kubenswrapper[4685]: E0129 16:26:55.601975 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.602038 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:55 crc kubenswrapper[4685]: E0129 16:26:55.602098 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.657270 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.657325 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.657341 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.657363 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.657376 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.759534 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.759584 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.759597 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.759618 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.759630 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.862821 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.862876 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.862889 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.862907 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.862916 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.966052 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.966126 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.966153 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.966181 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:55 crc kubenswrapper[4685]: I0129 16:26:55.966202 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:55Z","lastTransitionTime":"2026-01-29T16:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.069192 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.069237 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.069245 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.069260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.069269 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.172504 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.172552 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.172562 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.172582 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.172593 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.275524 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.275620 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.275651 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.275689 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.275710 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.378107 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.378150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.378162 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.378179 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.378191 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.481175 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.481249 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.481269 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.481294 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.481315 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.577595 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 00:14:18.125298867 +0000 UTC Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.584484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.584780 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.584959 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.585171 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.585364 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.601005 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:56 crc kubenswrapper[4685]: E0129 16:26:56.601286 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.689260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.689289 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.689298 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.689310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.689319 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.791638 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.791677 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.791688 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.791701 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.791711 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.894708 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.894792 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.894810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.894841 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.894868 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.996749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.996786 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.996802 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.996818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:56 crc kubenswrapper[4685]: I0129 16:26:56.996829 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:56Z","lastTransitionTime":"2026-01-29T16:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.099332 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.099370 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.099381 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.099416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.099431 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.201896 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.201947 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.201957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.201973 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.201985 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.304171 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.304203 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.304212 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.304225 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.304234 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.406734 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.406782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.406791 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.406807 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.406816 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.509431 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.509496 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.509514 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.509537 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.509553 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.578724 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 21:19:54.387470102 +0000 UTC Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.601506 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.601541 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.601669 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:57 crc kubenswrapper[4685]: E0129 16:26:57.601854 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:57 crc kubenswrapper[4685]: E0129 16:26:57.602044 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:57 crc kubenswrapper[4685]: E0129 16:26:57.602158 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.612566 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.612678 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.612699 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.612771 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.612789 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.714833 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.714875 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.714885 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.714899 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.714909 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.817518 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.817581 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.817601 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.817626 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.817643 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.920182 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.920230 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.920242 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.920261 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:57 crc kubenswrapper[4685]: I0129 16:26:57.920274 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:57Z","lastTransitionTime":"2026-01-29T16:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.023218 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.023277 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.023295 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.023324 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.023344 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.125938 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.125990 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.126004 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.126020 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.126032 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.229102 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.229149 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.229160 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.229178 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.229189 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.331346 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.331436 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.331455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.331478 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.331495 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.434378 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.434440 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.434451 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.434464 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.434475 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.536841 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.536891 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.536903 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.536919 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.536929 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.579651 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 01:29:14.366677033 +0000 UTC Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.601554 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:26:58 crc kubenswrapper[4685]: E0129 16:26:58.601799 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.640642 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.640696 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.640705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.640721 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.640730 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.742683 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.742721 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.742732 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.742746 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.742755 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.845110 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.845148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.845157 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.845174 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.845183 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.949264 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.949344 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.949367 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.949431 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:58 crc kubenswrapper[4685]: I0129 16:26:58.949457 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:58Z","lastTransitionTime":"2026-01-29T16:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.052797 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.052864 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.052900 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.052924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.052942 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.155540 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.155590 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.155607 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.155629 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.155647 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.258233 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.258299 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.258316 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.258341 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.258358 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.361097 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.361157 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.361174 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.361195 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.361214 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.463570 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.463614 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.463626 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.463641 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.463650 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.566260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.566326 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.566350 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.566381 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.566452 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.580226 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 18:24:26.10904973 +0000 UTC Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.601744 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.601835 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:26:59 crc kubenswrapper[4685]: E0129 16:26:59.601962 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:26:59 crc kubenswrapper[4685]: E0129 16:26:59.602034 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.601839 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:26:59 crc kubenswrapper[4685]: E0129 16:26:59.602122 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.670185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.670231 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.670244 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.670263 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.670275 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.773333 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.773431 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.773450 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.773474 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.773490 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.877166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.877220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.877234 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.877254 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.877266 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.979672 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.979767 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.979824 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.979850 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:26:59 crc kubenswrapper[4685]: I0129 16:26:59.979870 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:26:59Z","lastTransitionTime":"2026-01-29T16:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.082813 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.082937 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.082958 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.082982 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.082999 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.185748 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.185801 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.185816 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.185838 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.185853 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.249871 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.249914 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.249924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.249940 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.249952 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: E0129 16:27:00.265317 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:00Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.272940 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.273264 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.273349 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.273428 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.273460 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: E0129 16:27:00.292286 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:00Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.296184 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.296219 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.296230 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.296246 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.296257 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: E0129 16:27:00.309426 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:00Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.312563 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.312592 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.312601 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.312614 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.312624 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: E0129 16:27:00.324871 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:00Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.328468 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.328496 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.328508 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.328523 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.328532 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: E0129 16:27:00.339103 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:00Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:00 crc kubenswrapper[4685]: E0129 16:27:00.339214 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.340768 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.340800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.340809 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.340823 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.340833 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.443568 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.443647 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.443672 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.443702 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.443736 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.546120 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.546180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.546198 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.546226 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.546251 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.580732 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 15:23:09.106042532 +0000 UTC Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.601237 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:00 crc kubenswrapper[4685]: E0129 16:27:00.601476 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.653458 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.653500 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.653512 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.653529 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.653541 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.756130 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.756201 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.756245 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.756271 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.756288 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.859576 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.859625 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.859641 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.859659 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.859668 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.961916 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.961963 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.961973 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.961988 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:00 crc kubenswrapper[4685]: I0129 16:27:00.961997 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:00Z","lastTransitionTime":"2026-01-29T16:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.065251 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.065305 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.065324 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.065345 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.065357 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.168034 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.168074 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.168086 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.168103 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.168115 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.270912 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.270970 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.270993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.271023 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.271044 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.359486 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.379738 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.383110 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.383144 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.383157 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.383175 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.383188 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.415314 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.437513 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.450618 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.463574 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.476311 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f
88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.485361 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.485386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.485411 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.485429 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.485438 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.489659 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.502829 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.517078 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.537534 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc
0b757d8c860f533a41c3f545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.558340 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.575963 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.581790 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 22:11:58.917616703 +0000 UTC Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.588038 4685 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.588098 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.588113 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.588138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.588157 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.590968 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.600760 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.600760 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.600964 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:01 crc kubenswrapper[4685]: E0129 16:27:01.600983 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:01 crc kubenswrapper[4685]: E0129 16:27:01.601207 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:01 crc kubenswrapper[4685]: E0129 16:27:01.601336 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.601993 4685 scope.go:117] "RemoveContainer" containerID="ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.613606 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.626325 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.636767 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.650253 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.665020 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.690999 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.691031 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.691040 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.691055 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.691066 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.793313 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.793358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.793371 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.793412 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.793427 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.895509 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.895546 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.895555 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.895569 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.895578 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.939376 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/1.log" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.941619 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa"} Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.942101 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.957417 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f6434
7ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.974831 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.987060 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:01Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.997977 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.998046 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.998088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.998124 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:01 crc kubenswrapper[4685]: I0129 16:27:01.998136 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:01Z","lastTransitionTime":"2026-01-29T16:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.002810 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.020688 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.032921 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.045454 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\
"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.059023 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.070468 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.082739 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.094021 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.100900 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.100940 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.100950 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.100969 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.100990 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.107682 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.117844 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.127098 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.144733 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.156105 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.167101 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.177328 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.203684 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.203726 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.203735 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.203750 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.203759 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.306709 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.306753 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.306765 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.306782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.306795 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.411266 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.411328 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.411344 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.411365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.411378 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.515233 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.515327 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.515338 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.515356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.515366 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.582942 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 03:03:20.915664588 +0000 UTC Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.601578 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:02 crc kubenswrapper[4685]: E0129 16:27:02.601757 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.618151 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.618199 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.618209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.618226 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.618236 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.720791 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.720878 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.720901 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.720930 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.720952 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.824020 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.824101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.824125 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.824156 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.824183 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.927662 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.927753 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.927771 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.927794 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.927809 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:02Z","lastTransitionTime":"2026-01-29T16:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.946721 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/2.log" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.947741 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/1.log" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.954475 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa" exitCode=1 Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.954534 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa"} Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.954580 4685 scope.go:117] "RemoveContainer" containerID="ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.955496 4685 scope.go:117] "RemoveContainer" containerID="598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa" Jan 29 16:27:02 crc kubenswrapper[4685]: E0129 16:27:02.955874 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.969715 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.981223 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:02 crc kubenswrapper[4685]: I0129 16:27:02.997178 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:02Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.010830 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.021841 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.030999 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.031074 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.031099 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.031125 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.031140 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.032884 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.056765 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.061359 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.070723 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.085924 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.098839 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.117568 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8
f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.131272 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8
a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.133216 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.133277 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.133288 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc 
kubenswrapper[4685]: I0129 16:27:03.133310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.133324 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.146436 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.166336 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.183438 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.198640 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.213215 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\
"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.230378 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.236750 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.236814 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc 
kubenswrapper[4685]: I0129 16:27:03.236825 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.236850 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.236865 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.248889 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.264122 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.276292 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.295196 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.309362 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.322250 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.335768 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.339647 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.339935 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc 
kubenswrapper[4685]: I0129 16:27:03.340022 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.340095 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.340153 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.347347 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.359247 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.369248 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.380046 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.389948 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.402893 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.418942 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.430957 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.442227 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.442677 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.442709 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.442720 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.442739 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.442751 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.453209 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.473811 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.544793 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.544832 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.544844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.544859 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.544871 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.583883 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 21:58:02.395065265 +0000 UTC Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.601550 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:03 crc kubenswrapper[4685]: E0129 16:27:03.601907 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.601709 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:03 crc kubenswrapper[4685]: E0129 16:27:03.602146 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.601637 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:03 crc kubenswrapper[4685]: E0129 16:27:03.602538 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.622597 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0
e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.637021 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.647746 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.647789 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.647798 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.647817 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.647828 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.649052 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.661668 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.673765 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f
88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.686704 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.708840 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.720628 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.745442 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab7e4359a6c430f9b2d8fb09c3e1dd7ad17241bc0b757d8c860f533a41c3f545\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:26:48Z\\\",\\\"message\\\":\\\"actory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0129 16:26:48.135343 6108 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0129 16:26:48.135366 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0129 16:26:48.135369 6108 handler.go:208] Removed *v1.Node event handler 2\\\\nI0129 16:26:48.135433 6108 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0129 16:26:48.135440 6108 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0129 16:26:48.135445 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0129 16:26:48.135460 6108 handler.go:208] Removed *v1.Node event handler 7\\\\nI0129 16:26:48.135483 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0129 16:26:48.135499 6108 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0129 16:26:48.135512 6108 factory.go:656] Stopping watch factory\\\\nI0129 16:26:48.135535 6108 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0129 16:26:48.135899 6108 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0129 16:26:48.136078 6108 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0129 16:26:48.136162 6108 ovnkube.go:599] Stopped ovnkube\\\\nI0129 16:26:48.136227 6108 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0129 16:26:48.136382 6108 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.749910 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.750072 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.750170 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.750262 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.750340 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.759673 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.772369 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.784880 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.799377 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.812132 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.825065 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.836573 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.847716 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.853186 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.853238 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.853254 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.853276 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.853290 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.859683 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.956053 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.956630 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.956743 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.956829 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.956905 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:03Z","lastTransitionTime":"2026-01-29T16:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.959341 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/2.log" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.964584 4685 scope.go:117] "RemoveContainer" containerID="598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa" Jan 29 16:27:03 crc kubenswrapper[4685]: E0129 16:27:03.964722 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.982875 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:03 crc kubenswrapper[4685]: I0129 16:27:03.997677 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:03Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.015434 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.044253 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db65114
83404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.057574 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.061731 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.061783 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.061796 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.061819 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.061831 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.073229 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"moun
tPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.093851 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"started
At\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.108809 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\
\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.125173 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.139268 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.153928 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.165747 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.165808 4685 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.165824 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.165848 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.165867 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.169685 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.189497 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.211180 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.231079 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.245597 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.259743 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.268763 4685 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.268801 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.268809 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.268823 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.268834 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.292007 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:04Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.371457 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.371523 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.371547 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.371583 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.371607 4685 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.474639 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.474689 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.474700 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.474717 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.474728 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.579117 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.579169 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.579189 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.579230 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.579248 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.584235 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 01:57:39.261537319 +0000 UTC Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.601615 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:04 crc kubenswrapper[4685]: E0129 16:27:04.601815 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.682882 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.682927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.682940 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.682960 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.682970 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.785094 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.785139 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.785148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.785163 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.785185 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.888042 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.888079 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.888088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.888101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.888111 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.990519 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.990568 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.990699 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.990722 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:04 crc kubenswrapper[4685]: I0129 16:27:04.990740 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:04Z","lastTransitionTime":"2026-01-29T16:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.093428 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.093461 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.093470 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.093483 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.093494 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.196542 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.196587 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.196596 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.196611 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.196622 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.299009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.299062 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.299079 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.299103 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.299120 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.402687 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.403100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.403185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.403528 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.403607 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.481302 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.481430 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.481488 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.481514 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.481546 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481603 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:27:37.481586352 +0000 UTC m=+84.678942614 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481668 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481695 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481720 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481766 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481734 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:37.481726015 +0000 UTC m=+84.679082277 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481782 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481776 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481817 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:37.481791927 +0000 UTC m=+84.679148219 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481824 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481842 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:37.481830208 +0000 UTC m=+84.679186510 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481846 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.481934 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:37.48190785 +0000 UTC m=+84.679264142 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.506138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.506170 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.506178 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.506193 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.506203 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.585676 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 22:10:27.977615021 +0000 UTC Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.601154 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.601231 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.601159 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.601330 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.601438 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:05 crc kubenswrapper[4685]: E0129 16:27:05.601587 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.610562 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.610670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.610692 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.610762 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.610782 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.714059 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.714125 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.714148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.714176 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.714199 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.817219 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.817262 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.817273 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.817290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.817303 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.919710 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.919749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.919760 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.919777 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:05 crc kubenswrapper[4685]: I0129 16:27:05.919788 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:05Z","lastTransitionTime":"2026-01-29T16:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.023076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.023168 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.023188 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.023211 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.023231 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.125756 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.125799 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.125811 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.125829 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.125841 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.228508 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.228550 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.228562 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.228581 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.228595 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.291700 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:06 crc kubenswrapper[4685]: E0129 16:27:06.291897 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:27:06 crc kubenswrapper[4685]: E0129 16:27:06.291986 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:27:38.291962351 +0000 UTC m=+85.489318653 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.332003 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.332072 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.332087 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.332112 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.332127 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.436550 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.436603 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.436624 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.436651 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.436670 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.538998 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.539030 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.539041 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.539055 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.539065 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.586795 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 15:08:32.461790417 +0000 UTC Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.601172 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:06 crc kubenswrapper[4685]: E0129 16:27:06.601301 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.641367 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.641420 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.641428 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.641441 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.641452 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.744454 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.744813 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.744927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.745016 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.745100 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.847690 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.847724 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.847734 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.847747 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.847756 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.950414 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.950475 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.950487 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.950507 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:06 crc kubenswrapper[4685]: I0129 16:27:06.950520 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:06Z","lastTransitionTime":"2026-01-29T16:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.053167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.053222 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.053454 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.053477 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.053494 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.157001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.157288 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.157384 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.157564 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.157659 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.259941 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.260307 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.260496 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.260758 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.260909 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.364927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.365009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.365023 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.365047 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.365062 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.468308 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.468351 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.468362 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.468378 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.468417 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.572025 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.572088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.572103 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.572127 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.572142 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.587701 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 06:06:20.031167958 +0000 UTC Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.601004 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.601034 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:07 crc kubenswrapper[4685]: E0129 16:27:07.601206 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.601322 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:07 crc kubenswrapper[4685]: E0129 16:27:07.601525 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:07 crc kubenswrapper[4685]: E0129 16:27:07.601640 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.675044 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.675149 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.675163 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.675177 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.675185 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.778499 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.778557 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.778570 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.778591 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.778604 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.882777 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.882828 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.882841 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.882857 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.882868 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.985014 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.985071 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.985084 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.985156 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:07 crc kubenswrapper[4685]: I0129 16:27:07.985172 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:07Z","lastTransitionTime":"2026-01-29T16:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.088720 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.088782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.088802 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.088829 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.088850 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.191119 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.191161 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.191173 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.191190 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.191202 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.294304 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.294334 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.294343 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.294356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.294364 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.397666 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.397724 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.397749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.397780 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.397801 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.500363 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.500454 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.500467 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.500481 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.500491 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.588081 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 15:47:47.01967366 +0000 UTC Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.601556 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:08 crc kubenswrapper[4685]: E0129 16:27:08.601781 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.603699 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.603792 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.603832 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.603870 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.603892 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.707329 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.707543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.707591 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.707624 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.707649 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.811329 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.811417 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.811441 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.811468 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.811493 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.914130 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.914192 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.914215 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.914249 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:08 crc kubenswrapper[4685]: I0129 16:27:08.914270 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:08Z","lastTransitionTime":"2026-01-29T16:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.017670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.017749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.017772 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.017806 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.017835 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.120360 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.120461 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.120481 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.120513 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.120534 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.224269 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.224341 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.224358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.224384 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.224437 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.327914 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.327969 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.327991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.328024 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.328049 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.432011 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.432091 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.432534 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.432566 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.432582 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.535990 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.536053 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.536067 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.536089 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.536109 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.588384 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 22:27:41.216671989 +0000 UTC Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.600918 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.600959 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.601024 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:09 crc kubenswrapper[4685]: E0129 16:27:09.601049 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:09 crc kubenswrapper[4685]: E0129 16:27:09.601156 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:09 crc kubenswrapper[4685]: E0129 16:27:09.601254 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.638816 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.638841 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.638849 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.638862 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.638871 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.741479 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.741515 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.741526 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.741543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.741553 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.843545 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.843577 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.843585 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.843600 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.843609 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.945809 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.945860 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.945876 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.945898 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:09 crc kubenswrapper[4685]: I0129 16:27:09.945911 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:09Z","lastTransitionTime":"2026-01-29T16:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.049006 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.049542 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.049725 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.049865 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.050004 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.152338 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.152705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.152890 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.153091 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.153252 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.256213 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.256276 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.256288 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.256305 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.256316 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.359120 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.359158 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.359167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.359185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.359198 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.461757 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.462130 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.462205 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.462267 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.462335 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.565090 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.565158 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.565180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.565207 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.565229 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.589449 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 06:03:22.132791194 +0000 UTC Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.600635 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:10 crc kubenswrapper[4685]: E0129 16:27:10.600758 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.668273 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.668339 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.668355 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.668375 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.668387 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.717137 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.717171 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.717180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.717194 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.717204 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: E0129 16:27:10.730707 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:10Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.736487 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.736537 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.736554 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.736579 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.736595 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: E0129 16:27:10.751342 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:10Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.756818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.756886 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.756909 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.756941 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.757024 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: E0129 16:27:10.773497 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:10Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.777278 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.777360 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.777373 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.777427 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.777442 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: E0129 16:27:10.791524 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:10Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.795905 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.795941 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.795953 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.795971 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.795983 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: E0129 16:27:10.813016 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:10Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:10 crc kubenswrapper[4685]: E0129 16:27:10.813141 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.815099 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.815169 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.815185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.815209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.815223 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.918817 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.918887 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.918912 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.918938 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:10 crc kubenswrapper[4685]: I0129 16:27:10.918956 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:10Z","lastTransitionTime":"2026-01-29T16:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.022016 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.022089 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.022115 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.022146 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.022172 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.125742 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.125851 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.125877 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.125910 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.125933 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.229558 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.229650 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.229665 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.229692 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.229707 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.333961 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.334420 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.334596 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.334797 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.334943 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.437714 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.437760 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.437771 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.437792 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.437804 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.541300 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.541367 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.541386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.541439 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.541458 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.590844 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 07:15:37.324511585 +0000 UTC Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.600993 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.601059 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.601005 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:11 crc kubenswrapper[4685]: E0129 16:27:11.601215 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:11 crc kubenswrapper[4685]: E0129 16:27:11.601342 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:11 crc kubenswrapper[4685]: E0129 16:27:11.601479 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.672791 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.672881 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.672901 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.672932 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.672955 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.775581 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.775631 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.775641 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.775656 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.775665 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.878725 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.878799 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.878819 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.878846 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.878863 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.982563 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.982677 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.982693 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.982716 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:11 crc kubenswrapper[4685]: I0129 16:27:11.982729 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:11Z","lastTransitionTime":"2026-01-29T16:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.086136 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.086175 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.086185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.086201 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.086210 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.189211 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.189646 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.189805 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.190148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.190340 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.292949 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.293040 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.293062 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.293088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.293110 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.396528 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.396828 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.396897 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.396964 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.397029 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.500209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.500248 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.500272 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.500288 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.500297 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.591380 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 00:24:04.194161353 +0000 UTC Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.600758 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:12 crc kubenswrapper[4685]: E0129 16:27:12.601006 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.602325 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.602387 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.602444 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.602472 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.602495 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.704997 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.705045 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.705053 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.705069 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.705082 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.808172 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.808205 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.808228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.808246 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.808256 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.910435 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.910497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.910515 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.910543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:12 crc kubenswrapper[4685]: I0129 16:27:12.910566 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:12Z","lastTransitionTime":"2026-01-29T16:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.013521 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.013562 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.013576 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.013593 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.013604 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.117068 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.117148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.117176 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.117209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.117233 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.221166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.221237 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.221256 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.221284 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.221306 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.324071 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.324125 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.324142 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.324166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.324186 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.428233 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.428303 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.428327 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.428358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.428382 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.531005 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.531045 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.531088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.531112 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.531134 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.593302 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 03:56:11.010517957 +0000 UTC Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.601673 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.601725 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.609862 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:13 crc kubenswrapper[4685]: E0129 16:27:13.610146 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:13 crc kubenswrapper[4685]: E0129 16:27:13.611915 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:13 crc kubenswrapper[4685]: E0129 16:27:13.612432 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.628830 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\"
:\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.634656 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.634780 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.634844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.634875 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.634936 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.650788 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.666942 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.681384 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.697733 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.714036 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.733328 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.738562 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.738642 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.738657 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.738676 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.738689 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.755927 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.772967 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.790169 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.808650 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.835464 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.840800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.840857 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.840875 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.840901 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.840923 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.854521 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.871601 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.891208 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.914674 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db65114
83404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.933180 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.942785 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.942824 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.942838 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.942857 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.942872 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:13Z","lastTransitionTime":"2026-01-29T16:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:13 crc kubenswrapper[4685]: I0129 16:27:13.949607 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:13Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.045944 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.045985 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.045994 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 
16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.046010 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.046019 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.152993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.153046 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.153058 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.153076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.153091 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.256544 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.256605 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.256622 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.256646 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.256664 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.359767 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.359797 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.359806 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.359818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.359827 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.461857 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.461892 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.461907 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.461923 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.461932 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.564844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.564909 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.564994 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.565026 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.565049 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.593892 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 05:14:37.721827357 +0000 UTC Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.601305 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:14 crc kubenswrapper[4685]: E0129 16:27:14.601547 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.667245 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.667284 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.667293 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.667308 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.667317 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.770117 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.770178 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.770195 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.770216 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.770233 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.873346 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.873382 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.873410 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.873429 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.873439 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.975869 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.975934 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.975947 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.975962 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:14 crc kubenswrapper[4685]: I0129 16:27:14.975971 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:14Z","lastTransitionTime":"2026-01-29T16:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.078655 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.078731 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.078744 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.078762 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.078777 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.181005 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.181050 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.181061 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.181078 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.181087 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.283434 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.283475 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.283486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.283500 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.283509 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.386310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.386354 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.386367 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.386385 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.386438 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.489851 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.489916 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.489934 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.489961 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.489978 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.592316 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.592492 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.592512 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.592536 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.592552 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.594701 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 05:44:44.045425971 +0000 UTC Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.600969 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:15 crc kubenswrapper[4685]: E0129 16:27:15.601061 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.601183 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:15 crc kubenswrapper[4685]: E0129 16:27:15.601227 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.601410 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:15 crc kubenswrapper[4685]: E0129 16:27:15.601454 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.694792 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.694832 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.694844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.694862 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.694872 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.799712 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.799764 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.799775 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.799795 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.799808 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.902284 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.902343 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.902365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.902423 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:15 crc kubenswrapper[4685]: I0129 16:27:15.902442 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:15Z","lastTransitionTime":"2026-01-29T16:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.006361 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.006812 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.007068 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.007247 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.007458 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.112290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.112695 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.112800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.112897 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.112999 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.216621 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.216991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.217166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.217319 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.217493 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.320794 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.320840 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.320850 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.320865 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.320876 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.424099 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.424164 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.424182 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.424209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.424229 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.528225 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.528296 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.528314 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.528343 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.528362 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.595303 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 10:37:58.683843863 +0000 UTC Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.601686 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:16 crc kubenswrapper[4685]: E0129 16:27:16.602059 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.630976 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.631032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.631049 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.631076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.631094 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.734543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.734606 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.734620 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.734639 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.734654 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.837650 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.837723 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.837747 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.837779 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.837800 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.941100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.941158 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.941175 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.941198 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:16 crc kubenswrapper[4685]: I0129 16:27:16.941214 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:16Z","lastTransitionTime":"2026-01-29T16:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.044656 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.044713 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.044738 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.044762 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.044781 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.147887 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.147973 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.147991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.148016 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.148033 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.251554 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.251632 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.251651 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.251686 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.251711 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.356076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.356173 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.356219 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.356261 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.356286 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.459903 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.459955 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.459977 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.460001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.460014 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.563795 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.563879 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.563899 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.563949 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.563985 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.595521 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 22:10:05.691162315 +0000 UTC Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.601091 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.601127 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.601465 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:17 crc kubenswrapper[4685]: E0129 16:27:17.601565 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:17 crc kubenswrapper[4685]: E0129 16:27:17.601801 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.601870 4685 scope.go:117] "RemoveContainer" containerID="598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa" Jan 29 16:27:17 crc kubenswrapper[4685]: E0129 16:27:17.602021 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:17 crc kubenswrapper[4685]: E0129 16:27:17.602143 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.667719 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.667766 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.667777 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.667801 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.667813 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.770869 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.770956 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.770982 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.771009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.771025 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.873170 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.873217 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.873228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.873243 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.873252 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.975791 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.975831 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.975841 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.975854 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:17 crc kubenswrapper[4685]: I0129 16:27:17.975864 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:17Z","lastTransitionTime":"2026-01-29T16:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.079459 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.079540 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.079555 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.079579 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.079595 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.182982 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.183068 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.183092 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.183128 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.183151 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.286570 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.286668 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.286691 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.286724 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.286744 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.390256 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.390329 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.390357 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.390425 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.390449 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.493626 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.493706 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.493720 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.493738 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.493750 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.595860 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 15:51:11.117427672 +0000 UTC Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.596615 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.596653 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.596672 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.596700 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.596721 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.601562 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:18 crc kubenswrapper[4685]: E0129 16:27:18.601856 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.700442 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.700518 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.700536 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.700565 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.700586 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.803617 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.803696 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.803712 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.803733 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.803751 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.909088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.909157 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.909169 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.909193 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:18 crc kubenswrapper[4685]: I0129 16:27:18.909205 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:18Z","lastTransitionTime":"2026-01-29T16:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.012200 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.012250 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.012260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.012279 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.012289 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.115650 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.115730 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.115757 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.115787 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.115806 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.220684 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.220774 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.220800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.220833 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.220855 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.324800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.324881 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.324908 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.324945 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.324968 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.428708 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.428782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.428801 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.428831 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.428852 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.531706 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.531828 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.531855 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.532260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.532283 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.596221 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 20:28:50.194429605 +0000 UTC Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.601739 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.601782 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.601778 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:19 crc kubenswrapper[4685]: E0129 16:27:19.601943 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:19 crc kubenswrapper[4685]: E0129 16:27:19.602125 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:19 crc kubenswrapper[4685]: E0129 16:27:19.602360 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.635891 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.635957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.635974 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.636002 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.636021 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.739234 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.739296 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.739310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.739337 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.739359 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.842303 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.842428 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.842448 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.842475 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.842528 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.946149 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.946204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.946218 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.946243 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:19 crc kubenswrapper[4685]: I0129 16:27:19.946255 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:19Z","lastTransitionTime":"2026-01-29T16:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.048856 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.048909 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.048924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.048943 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.048961 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.151232 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.151287 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.151302 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.151325 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.151338 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.254423 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.254479 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.254493 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.254516 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.254530 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.358936 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.359026 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.359122 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.359163 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.359186 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.462462 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.462526 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.462549 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.462581 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.462602 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.564692 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.564740 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.564754 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.564778 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.564795 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.597194 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 12:04:11.760938986 +0000 UTC Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.601569 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:20 crc kubenswrapper[4685]: E0129 16:27:20.601730 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.667340 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.667378 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.667388 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.667446 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.667458 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.769798 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.769869 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.769889 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.769917 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.769938 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.877360 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.877423 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.877435 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.877453 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.877464 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.921912 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.921960 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.921973 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.921991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.922003 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: E0129 16:27:20.933163 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:20Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.936927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.937047 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.937081 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.937112 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.937137 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: E0129 16:27:20.950433 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:20Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.954183 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.954231 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.954240 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.954255 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.954264 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: E0129 16:27:20.964549 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:20Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.967818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.967849 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.967859 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.967874 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.967885 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: E0129 16:27:20.979457 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:20Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.983187 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.983230 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.983242 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.983259 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.983268 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:20 crc kubenswrapper[4685]: E0129 16:27:20.993456 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:20Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:20 crc kubenswrapper[4685]: E0129 16:27:20.993621 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.995080 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.995114 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.995126 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.995142 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:20 crc kubenswrapper[4685]: I0129 16:27:20.995153 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:20Z","lastTransitionTime":"2026-01-29T16:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.098055 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.098121 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.098136 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.098159 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.098171 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.200426 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.200459 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.200468 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.200501 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.200510 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.302414 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.302452 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.302463 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.302476 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.302486 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.404769 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.404809 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.404818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.404836 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.404845 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.507535 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.507584 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.507600 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.507621 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.507636 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.598225 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 15:14:09.770105112 +0000 UTC Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.600783 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.600914 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:21 crc kubenswrapper[4685]: E0129 16:27:21.600974 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:21 crc kubenswrapper[4685]: E0129 16:27:21.601075 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.600910 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:21 crc kubenswrapper[4685]: E0129 16:27:21.601351 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.610416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.610451 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.610463 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.610477 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.610487 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.713450 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.713492 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.713504 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.713517 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.713525 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.816193 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.816245 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.816258 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.816273 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.816285 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.919867 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.919921 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.919935 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.919956 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:21 crc kubenswrapper[4685]: I0129 16:27:21.919969 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:21Z","lastTransitionTime":"2026-01-29T16:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.023414 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.023476 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.023488 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.023505 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.023535 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.031828 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/0.log" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.031913 4685 generic.go:334] "Generic (PLEG): container finished" podID="d75c6b72-980a-4110-b259-c4b54f0ea9b2" containerID="8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f" exitCode=1 Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.032003 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerDied","Data":"8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.033032 4685 scope.go:117] "RemoveContainer" containerID="8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.054675 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4a
cac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.079291 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.093512 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.107296 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.125821 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.125981 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.126018 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.126108 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.126177 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.127276 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db65114
83404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.146608 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.165043 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.182253 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.202668 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.221696 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.233214 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.233714 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.233727 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.233743 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.233754 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.235349 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.252794 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.267756 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.282985 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.301826 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.317672 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.329424 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.341926 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.341986 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.342000 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.342031 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.342043 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.342038 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:22Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.444465 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.444509 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.444517 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.444531 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 
16:27:22.444540 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.546652 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.546718 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.546730 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.546750 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.546769 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.599432 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 11:47:34.172793589 +0000 UTC Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.600652 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:22 crc kubenswrapper[4685]: E0129 16:27:22.600787 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.649151 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.649211 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.649223 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.649247 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.649261 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.758077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.758116 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.758126 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.758143 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.758155 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.861167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.861233 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.861247 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.861263 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.861272 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.963828 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.963888 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.963908 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.963936 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:22 crc kubenswrapper[4685]: I0129 16:27:22.963958 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:22Z","lastTransitionTime":"2026-01-29T16:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.039069 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/0.log" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.039154 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerStarted","Data":"d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.062560 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.066429 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.066483 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.066501 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.066525 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.066541 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.074367 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.086200 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.098088 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.112632 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f
88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.127310 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.141596 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.155960 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.170129 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.170176 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.170191 4685 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.170209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.170219 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.175269 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db65114
83404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.187852 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.200317 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.217726 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.234677 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.254787 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.267684 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.273113 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.273187 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.273203 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.273229 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.273244 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.283490 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.297908 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.309969 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.376177 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.376255 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.376265 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.376280 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.376290 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.478709 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.478772 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.478784 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.478801 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.478813 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.580953 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.580997 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.581009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.581028 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.581041 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.599550 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 06:14:56.776573459 +0000 UTC Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.600829 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.600901 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:23 crc kubenswrapper[4685]: E0129 16:27:23.600980 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.600912 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:23 crc kubenswrapper[4685]: E0129 16:27:23.601104 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:23 crc kubenswrapper[4685]: E0129 16:27:23.601282 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.617340 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.635953 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.655410 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.669354 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.680676 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.683122 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.683150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.683165 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.683180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.683191 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.692430 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.708354 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.719947 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.731470 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.740137 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.749172 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.758389 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.772720 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.783298 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.785962 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.786000 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.786013 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.786032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.786044 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.795600 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.807476 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.829454 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.842127 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:23Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.888945 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.889041 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.889066 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.889096 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.889116 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.991608 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.991651 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.991663 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.991679 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:23 crc kubenswrapper[4685]: I0129 16:27:23.991689 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:23Z","lastTransitionTime":"2026-01-29T16:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.094554 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.094625 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.094638 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.094683 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.094697 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.197293 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.197486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.197514 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.197544 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.197567 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.300004 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.300036 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.300044 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.300059 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.300068 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.402742 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.402784 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.402799 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.402863 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.402883 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.505034 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.505102 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.505125 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.505153 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.505176 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.600125 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 08:44:05.161017133 +0000 UTC Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.601361 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:24 crc kubenswrapper[4685]: E0129 16:27:24.601638 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.608732 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.609663 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.609775 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.609809 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.609832 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.610466 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.714175 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.714218 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.714228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.714245 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.714256 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.816906 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.816948 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.816961 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.817010 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.817031 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.919661 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.919706 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.919715 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.919731 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:24 crc kubenswrapper[4685]: I0129 16:27:24.919744 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:24Z","lastTransitionTime":"2026-01-29T16:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.021871 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.022329 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.022525 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.022753 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.022960 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.125181 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.125220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.125232 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.125248 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.125258 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.228548 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.228612 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.228624 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.228645 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.228656 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.332051 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.332094 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.332106 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.332122 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.332132 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.434290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.434328 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.434342 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.434357 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.434369 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.536239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.536280 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.536290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.536314 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.536325 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.601448 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:51:53.933463733 +0000 UTC Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.601588 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.601601 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:25 crc kubenswrapper[4685]: E0129 16:27:25.601715 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:25 crc kubenswrapper[4685]: E0129 16:27:25.601842 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.601487 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:25 crc kubenswrapper[4685]: E0129 16:27:25.602235 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.638664 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.638718 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.638727 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.638743 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.638759 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.740993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.741028 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.741038 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.741051 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.741059 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.843102 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.843141 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.843157 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.843174 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.843183 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.945709 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.946128 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.946464 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.946572 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:25 crc kubenswrapper[4685]: I0129 16:27:25.946594 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:25Z","lastTransitionTime":"2026-01-29T16:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.048159 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.048204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.048215 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.048232 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.048243 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.150784 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.150827 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.150839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.150856 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.150868 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.252719 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.252794 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.252808 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.252825 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.252838 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.354449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.354489 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.354506 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.354520 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.354532 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.456038 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.456070 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.456080 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.456096 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.456107 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.560429 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.560533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.561014 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.561252 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.561498 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.601424 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:26 crc kubenswrapper[4685]: E0129 16:27:26.601622 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.601691 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 20:34:26.522494171 +0000 UTC Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.665339 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.665484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.665513 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.665617 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.665727 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.768767 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.768812 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.768861 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.768877 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.768887 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.871040 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.871077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.871086 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.871101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.871115 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.973967 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.974008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.974017 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.974030 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:26 crc kubenswrapper[4685]: I0129 16:27:26.974038 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:26Z","lastTransitionTime":"2026-01-29T16:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.076310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.076354 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.076363 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.076378 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.076406 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.179301 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.179353 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.179365 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.179386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.179415 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.282046 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.282090 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.282104 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.282121 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.282133 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.384657 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.384925 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.384942 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.384957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.384965 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.487956 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.488218 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.488312 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.488424 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.488518 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.590839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.591123 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.591202 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.591272 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.591335 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.601022 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.601130 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.601059 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:27 crc kubenswrapper[4685]: E0129 16:27:27.601355 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:27 crc kubenswrapper[4685]: E0129 16:27:27.601458 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:27 crc kubenswrapper[4685]: E0129 16:27:27.601605 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.602722 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 21:33:03.009378414 +0000 UTC Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.694529 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.694581 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.694592 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.694611 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.694623 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.796657 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.796697 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.796709 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.796725 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.796740 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.899652 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.899944 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.899957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.899972 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:27 crc kubenswrapper[4685]: I0129 16:27:27.899981 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:27Z","lastTransitionTime":"2026-01-29T16:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.001916 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.001965 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.001977 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.001993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.002004 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.104144 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.104180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.104189 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.104201 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.104210 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.206904 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.206965 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.206984 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.207008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.207024 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.309703 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.309765 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.309783 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.309808 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.309824 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.412790 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.412865 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.412889 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.412922 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.412945 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.515120 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.515165 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.515177 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.515194 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.515205 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.601437 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:28 crc kubenswrapper[4685]: E0129 16:27:28.601602 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.603602 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 15:44:06.206778519 +0000 UTC Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.618136 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.618189 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.618202 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.618224 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.618238 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.720940 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.720999 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.721016 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.721042 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.721059 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.824110 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.824150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.824164 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.824181 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.824191 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.926524 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.926585 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.926601 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.926627 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:28 crc kubenswrapper[4685]: I0129 16:27:28.926644 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:28Z","lastTransitionTime":"2026-01-29T16:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.029752 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.029831 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.029842 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.029858 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.029867 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.131989 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.132023 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.132032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.132045 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.132055 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.236082 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.236142 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.236158 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.236182 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.236198 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.338927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.338963 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.338972 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.338988 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.338998 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.442008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.442073 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.442091 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.442115 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.442132 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.545440 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.545483 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.545493 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.545510 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.545521 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.601536 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.601611 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.601624 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:29 crc kubenswrapper[4685]: E0129 16:27:29.601742 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
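
The certificate_manager entries interleaved in this log report a fixed kubelet-serving expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline on every attempt. That pattern is consistent with the deadline being re-drawn at random inside the tail of the certificate's validity window. The sketch below illustrates that idea only; the 70%..100% jitter range and the assumed issue time are illustrative assumptions, not claims about the exact constants used by the kubelet's certificate manager.

// Illustrative sketch: why the rotation deadline changes per attempt while the
// expiration stays fixed. The jitter range and NotBefore value are assumptions.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	// pick a random point in the last 30% of the validity period (assumed jitter range)
	jittered := time.Duration(float64(total) * (0.7 + 0.3*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z") // expiration from the log
	notBefore := notAfter.Add(-365 * 24 * time.Hour)                // assumed issue time for the example
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}
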
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:29 crc kubenswrapper[4685]: E0129 16:27:29.601864 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:29 crc kubenswrapper[4685]: E0129 16:27:29.602077 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.603832 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 23:14:19.086111256 +0000 UTC Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.647979 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.648026 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.648035 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.648050 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.648058 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.750306 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.750361 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.750375 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.750410 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.750424 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.852358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.852423 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.852438 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.852455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.852466 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.954953 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.954992 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.955003 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.955020 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:29 crc kubenswrapper[4685]: I0129 16:27:29.955032 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:29Z","lastTransitionTime":"2026-01-29T16:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.057811 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.057851 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.057863 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.057883 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.057895 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.159969 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.160013 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.160027 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.160043 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.160054 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.262669 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.262707 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.262715 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.262727 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.262736 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.365251 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.365291 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.365302 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.365317 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.365329 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.468151 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.468195 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.468205 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.468220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.468230 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.570782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.570943 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.570961 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.570982 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.570995 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.601274 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:30 crc kubenswrapper[4685]: E0129 16:27:30.601406 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.604534 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 14:01:23.61694528 +0000 UTC Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.686935 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.686988 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.687006 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.687037 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.687056 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.789928 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.789979 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.789991 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.790009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.790021 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.892981 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.893037 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.893053 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.893077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.893099 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.996172 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.996320 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.996355 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.996382 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:30 crc kubenswrapper[4685]: I0129 16:27:30.996438 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:30Z","lastTransitionTime":"2026-01-29T16:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.015634 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.015667 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.015680 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.015693 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.015704 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.038209 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:31Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.041631 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.041669 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
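
The node-status patch above is rejected before it reaches the API server's storage: the call to the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 fails TLS verification because the webhook's certificate expired on 2025-08-24, while the node clock reads 2026-01-29. The sketch below reproduces just that validity comparison against a PEM certificate; the file path is hypothetical, and this is only the NotBefore/NotAfter check that the "certificate has expired or is not yet valid" error refers to, not the full chain verification TLS performs.

// Illustrative only: compares the current time against a certificate's
// validity window, mirroring the condition behind the x509 error above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("webhook-cert.pem") // hypothetical path to the certificate served by the webhook
	if err != nil {
		fmt.Println("read:", err)
		os.Exit(1)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Println("no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse:", err)
		os.Exit(1)
	}
	now := time.Now()
	fmt.Printf("valid from %s to %s\n", cert.NotBefore, cert.NotAfter)
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Println("certificate is expired or not yet valid at", now)
	} else {
		fmt.Println("certificate is within its validity window at", now)
	}
}
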
event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.041682 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.041702 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.041715 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.059327 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:31Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.063736 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.063785 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.063798 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.063817 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.063832 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.076138 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... remainder of this retry's node status patch payload omitted: it is byte-for-byte identical to the 16:27:31.059327 attempt above (same Ready condition text, same image list, same nodeInfo) ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:31Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.079999 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.080045 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.080058 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.080076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.080088 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.092178 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... remainder of this retry's node status patch payload omitted: it is byte-for-byte identical to the 16:27:31.059327 attempt above ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:31Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.095370 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.095427 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.095439 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.095454 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.095462 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.109256 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... remainder of this retry's node status patch payload omitted: it is byte-for-byte identical to the 16:27:31.059327 attempt above ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:31Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.109449 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.111092 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc"
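Note: the entries above close out the retry sequence. The kubelet sends the node status as a strategic-merge patch (the $setElementOrder/conditions directive orders the condition list) and, when the patch is rejected, retries a bounded number of times before giving up with "update node status exceeds retry count"; five failed attempts are visible here (16:27:31.041, .059, .076, .092 and .109). The sketch below shows only that bounded-retry shape; patchNodeStatus is a hypothetical stand-in, not the kubelet's real client code, and the retry count simply matches what this capture shows.

// retry.go - bounded retry of a node status patch, mirroring the
// "will retry" / "exceeds retry count" pattern in the log above.
package main

import (
	"errors"
	"fmt"
)

// Five attempts are visible in this capture before the kubelet gives up.
const nodeStatusUpdateRetry = 5

// patchNodeStatus is a hypothetical stand-in for the real PATCH request; here
// it always fails, the way the expired webhook certificate makes it fail above.
func patchNodeStatus(node string) error {
	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": certificate has expired`)
}

func updateNodeStatus(node string) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(node); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus("crc"); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}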
event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.111167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.111188 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.111212 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.111227 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.214194 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.214244 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.214265 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.214283 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.214294 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.317358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.317482 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.317501 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.317525 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.317542 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.420466 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.420508 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.420519 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.420534 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.420544 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.523523 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.523599 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.523623 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.523861 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.523918 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.601710 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.601757 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.601999 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.601995 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.602463 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:31 crc kubenswrapper[4685]: E0129 16:27:31.602548 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.603161 4685 scope.go:117] "RemoveContainer" containerID="598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.604638 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 01:11:57.017478191 +0000 UTC Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.628493 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.628537 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.628549 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.628598 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.628612 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.730685 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.730730 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.730743 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.730760 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.730771 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.833499 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.833541 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.833551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.833570 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.833582 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.936512 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.936584 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.936603 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.936631 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:31 crc kubenswrapper[4685]: I0129 16:27:31.936651 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:31Z","lastTransitionTime":"2026-01-29T16:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.040044 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.040092 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.040105 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.040124 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.040138 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.068772 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/2.log" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.073386 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.074104 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.087511 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0ba8e52-a78e-4f60-832d-3455014b1274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a34d7de6add2aa924963aa3027487dfb44041c7b87a8a437e35bd5da275fc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.106085 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.127328 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.140797 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.142201 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.142244 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.142260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.142280 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.142295 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.154439 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.169898 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.180779 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.192535 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.210081 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.227819 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.245482 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.245512 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.245522 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.245536 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.245546 4685 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.246598 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.271186 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.288426 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.314948 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.346680 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.348449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.348516 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.348535 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.348562 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.348583 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.367312 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.382669 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.404116 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.421256 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:32Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.455147 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.455210 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.455229 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.455255 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.455270 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.557810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.557858 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.557871 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.557888 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.557898 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.601028 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:32 crc kubenswrapper[4685]: E0129 16:27:32.601257 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.605013 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 11:50:38.844151314 +0000 UTC Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.660382 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.660442 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.660455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.660474 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.660485 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.763294 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.763332 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.763343 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.763356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.763365 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.866572 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.866624 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.866639 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.866658 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.866670 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.969666 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.969705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.969715 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.969731 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:32 crc kubenswrapper[4685]: I0129 16:27:32.969739 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:32Z","lastTransitionTime":"2026-01-29T16:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.072924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.072971 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.072989 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.073013 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.073027 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.179438 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.179498 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.179515 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.179542 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.179573 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.282497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.282543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.282555 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.282573 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.282585 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.386563 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.386626 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.386643 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.386666 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.386684 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.489713 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.489787 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.489800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.489818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.489831 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.592042 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.592146 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.592158 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.592172 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.592180 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.600851 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.600984 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:33 crc kubenswrapper[4685]: E0129 16:27:33.601068 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.601118 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:33 crc kubenswrapper[4685]: E0129 16:27:33.601315 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:33 crc kubenswrapper[4685]: E0129 16:27:33.601575 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.605132 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 20:07:08.707821355 +0000 UTC Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.622293 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.646190 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20994
82919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.665566 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.686379 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.694684 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.694715 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.694726 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.694741 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.694752 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.697898 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.717093 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.730595 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846
bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.742823 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0ba8e52-a78e-4f60-832d-3455014b1274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a34d7de6add2aa924963aa3027487dfb44041c7b87a8a437e35bd5da275fc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.757961 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.773950 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.793207 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.797119 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.797169 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.797186 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.797211 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.797229 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.808752 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.830861 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.850148 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.865528 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.881112 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.902326 4685 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.902439 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.902462 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.902493 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.902515 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:33Z","lastTransitionTime":"2026-01-29T16:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.920596 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.940961 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:33 crc kubenswrapper[4685]: I0129 16:27:33.960294 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:33Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.006816 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.006908 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.006935 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.006967 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.006986 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.082772 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/3.log" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.083973 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/2.log" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.088109 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" exitCode=1 Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.088173 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.088247 4685 scope.go:117] "RemoveContainer" containerID="598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.090335 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:27:34 crc kubenswrapper[4685]: E0129 16:27:34.090852 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.108093 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.110739 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.110796 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.110815 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.111126 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.111176 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.122831 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0ba8e52-a78e-4f60-832d-3455014b1274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a34d7de6add2aa924963aa3027487dfb44041c7b87a8a437e35bd5da275fc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.143135 4685 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.161337 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.183742 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.201101 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.213660 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.213728 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.213746 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.213771 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.213789 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.216236 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.227951 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.240499 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.252067 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.275526 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.290988 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.303517 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.315426 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.316700 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.316755 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.316773 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.316790 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.316801 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.329249 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.344277 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.359991 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.374446 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.400687 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd
9267c080d9bf0644e0eed4e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:33Z\\\",\\\"message\\\":\\\"ule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8441, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8442, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8444, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:33.182796 6729 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF0129 16:27:33.182926 6729 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler 
{0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:34Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.420001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.420083 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.420101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.420152 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.420172 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.524062 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.524096 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.524104 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.524119 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.524128 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.601622 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:34 crc kubenswrapper[4685]: E0129 16:27:34.601872 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.605796 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 08:47:23.915439302 +0000 UTC Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.626776 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.626827 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.626839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.626858 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.626870 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.730026 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.730086 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.730103 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.730130 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.730152 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.832364 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.832482 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.832501 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.832528 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.832548 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.935712 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.935759 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.935771 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.935789 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:34 crc kubenswrapper[4685]: I0129 16:27:34.935802 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:34Z","lastTransitionTime":"2026-01-29T16:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.038522 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.038603 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.038631 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.038664 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.038691 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.094562 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/3.log" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.142150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.142220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.142239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.142264 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.142283 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.245252 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.245321 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.245345 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.245374 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.245432 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.348244 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.348313 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.348338 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.348366 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.348386 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.451039 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.451107 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.451127 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.451154 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.451172 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.554442 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.554541 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.554555 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.554575 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.554589 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.601695 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.601799 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:35 crc kubenswrapper[4685]: E0129 16:27:35.601886 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.601902 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:35 crc kubenswrapper[4685]: E0129 16:27:35.602044 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:35 crc kubenswrapper[4685]: E0129 16:27:35.602258 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.606093 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 00:09:18.71095519 +0000 UTC Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.657763 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.657824 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.657842 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.657868 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.657886 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.760386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.760457 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.760466 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.760481 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.760490 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.864084 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.864160 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.864180 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.864205 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.864223 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.967757 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.967830 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.967853 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.967882 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:35 crc kubenswrapper[4685]: I0129 16:27:35.967899 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:35Z","lastTransitionTime":"2026-01-29T16:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.070871 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.070974 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.071002 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.071028 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.071045 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.174239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.174305 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.174329 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.174358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.174377 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.277789 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.277860 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.277883 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.277916 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.277942 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.381218 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.381260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.381271 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.381288 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.381300 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.485058 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.485164 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.485185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.485421 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.485495 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.588737 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.588804 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.588827 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.588856 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.588878 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.600700 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:36 crc kubenswrapper[4685]: E0129 16:27:36.600869 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.606902 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 19:28:49.843258426 +0000 UTC Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.691354 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.691477 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.691503 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.691526 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.691543 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.793926 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.793988 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.794001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.794017 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.794027 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.897624 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.897694 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.897712 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.897740 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:36 crc kubenswrapper[4685]: I0129 16:27:36.897758 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:36Z","lastTransitionTime":"2026-01-29T16:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.000472 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.000517 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.000528 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.000544 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.000555 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.102932 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.103009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.103063 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.103097 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.103120 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.205650 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.205723 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.205746 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.205774 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.205797 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.308706 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.308794 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.308821 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.308853 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.308875 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.412751 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.412831 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.412846 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.412870 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.412890 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.515932 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.516021 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.516102 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.516173 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.516190 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.549135 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.549317 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549458 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.54937959 +0000 UTC m=+148.746735882 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549526 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.549540 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549592 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.549572904 +0000 UTC m=+148.746929206 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549648 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.549655 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.549691 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549699 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.549684637 +0000 UTC m=+148.747040939 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549795 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549816 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549838 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549909 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.549884262 +0000 UTC m=+148.747240554 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549906 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549943 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.549959 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.550023 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.550005425 +0000 UTC m=+148.747361727 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.601508 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.601530 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.601721 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.601753 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.601950 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:37 crc kubenswrapper[4685]: E0129 16:27:37.602089 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.607267 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 06:42:26.039064844 +0000 UTC Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.619491 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.619562 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.619581 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.619608 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.619626 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.723387 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.723598 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.723620 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.723647 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.723664 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.827227 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.827302 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.827322 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.827349 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.827368 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.930537 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.930614 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.930630 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.930651 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:37 crc kubenswrapper[4685]: I0129 16:27:37.930664 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:37Z","lastTransitionTime":"2026-01-29T16:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.034598 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.034663 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.034681 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.034705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.034725 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.137672 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.137741 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.137761 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.137786 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.137806 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.240302 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.240367 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.240386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.240484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.240505 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.344462 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.344571 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.344596 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.344626 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.344647 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.360302 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:38 crc kubenswrapper[4685]: E0129 16:27:38.360588 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:27:38 crc kubenswrapper[4685]: E0129 16:27:38.360694 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs podName:28e2de4c-c48c-4160-87df-b5a8d213a203 nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.360668401 +0000 UTC m=+149.558024693 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs") pod "network-metrics-daemon-rpctp" (UID: "28e2de4c-c48c-4160-87df-b5a8d213a203") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.448125 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.448203 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.448220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.448238 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.448250 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.551951 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.552033 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.552050 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.552075 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.552089 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.601127 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:38 crc kubenswrapper[4685]: E0129 16:27:38.601323 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.607901 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 17:13:36.822452648 +0000 UTC Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.654896 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.654955 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.654974 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.655001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.655021 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.758573 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.758637 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.758656 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.758681 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.758699 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.861960 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.862042 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.862069 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.862100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.862123 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.965371 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.966047 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.966077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.966100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:38 crc kubenswrapper[4685]: I0129 16:27:38.966117 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:38Z","lastTransitionTime":"2026-01-29T16:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.076309 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.076383 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.076439 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.076553 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.076579 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.179461 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.179514 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.179526 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.179545 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.179560 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.282450 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.282525 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.282548 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.282577 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.282602 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.385341 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.385424 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.385439 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.385457 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.385468 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.488504 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.488543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.488555 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.488569 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.488578 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.590965 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.591011 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.591020 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.591034 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.591045 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.600783 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.600814 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.601004 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:39 crc kubenswrapper[4685]: E0129 16:27:39.601187 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:39 crc kubenswrapper[4685]: E0129 16:27:39.601296 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:39 crc kubenswrapper[4685]: E0129 16:27:39.601447 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.608381 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 16:25:10.539426139 +0000 UTC Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.693944 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.694016 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.694036 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.694063 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.694082 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.802885 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.802965 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.802996 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.803027 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.803135 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.906565 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.906632 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.906649 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.906674 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:39 crc kubenswrapper[4685]: I0129 16:27:39.906691 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:39Z","lastTransitionTime":"2026-01-29T16:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.009379 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.009450 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.009465 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.009486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.009498 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.112332 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.112380 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.112432 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.112455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.112470 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.214523 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.214580 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.214593 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.214612 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.214625 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.316790 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.316841 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.316857 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.316878 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.316891 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.419122 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.419190 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.419209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.419236 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.419258 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.522770 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.522833 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.522849 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.522872 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.522889 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.601249 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:40 crc kubenswrapper[4685]: E0129 16:27:40.601592 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.608739 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 03:57:55.815384413 +0000 UTC Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.626455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.626525 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.626548 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.626577 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.626600 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.729234 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.729291 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.729308 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.729369 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.729422 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.832845 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.832906 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.832923 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.832947 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.832963 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.935810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.935875 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.935895 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.935920 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:40 crc kubenswrapper[4685]: I0129 16:27:40.935940 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:40Z","lastTransitionTime":"2026-01-29T16:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.038754 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.038806 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.038824 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.038847 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.038864 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.142299 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.142384 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.142450 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.142488 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.142513 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.246333 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.246455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.246489 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.246519 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.246540 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.280092 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.280174 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.280191 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.280215 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.280232 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.299937 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.305286 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.305355 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.305374 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.305443 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.305465 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.324734 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.330575 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.330634 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.330652 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.330676 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.330695 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.354799 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.361471 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.361516 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.361533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.361559 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.361583 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.376290 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.381600 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.381672 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.381729 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.381758 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.381777 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.402742 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:41Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.402981 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.405950 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.406204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.406244 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.406286 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.406313 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.510166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.510234 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.510251 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.510278 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.510297 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.601733 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.601883 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.601918 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.602004 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.602135 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:41 crc kubenswrapper[4685]: E0129 16:27:41.602267 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.609128 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 20:31:46.555081888 +0000 UTC Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.613358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.613449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.613474 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.613498 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.613522 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.716895 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.716960 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.716978 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.717004 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.717024 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.820423 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.820494 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.820504 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.820527 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.820540 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.924630 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.924723 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.924743 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.924773 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:41 crc kubenswrapper[4685]: I0129 16:27:41.924796 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:41Z","lastTransitionTime":"2026-01-29T16:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.028000 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.028093 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.028120 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.028144 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.028164 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.130097 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.130174 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.130193 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.130224 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.130243 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.233907 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.233983 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.234008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.234046 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.234073 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.336428 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.336470 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.336481 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.336499 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.336513 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.439290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.439341 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.439356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.439380 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.439431 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.543063 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.543137 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.543155 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.543183 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.543203 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.601670 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:42 crc kubenswrapper[4685]: E0129 16:27:42.601929 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.609817 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 02:41:27.823213928 +0000 UTC Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.646843 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.646925 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.646935 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.646963 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.646977 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.750371 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.750492 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.750507 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.750531 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.750548 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.853549 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.853680 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.853704 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.853732 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.853753 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.958494 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.958591 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.958605 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.958629 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:42 crc kubenswrapper[4685]: I0129 16:27:42.958643 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:42Z","lastTransitionTime":"2026-01-29T16:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.061374 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.061495 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.061510 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.061529 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.061541 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.165068 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.165138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.165164 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.165198 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.165222 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.267743 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.267795 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.267810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.267832 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.267849 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.370860 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.370939 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.371071 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.371177 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.371198 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.474953 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.475003 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.475017 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.475034 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.475048 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.577642 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.577715 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.577734 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.577761 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.577778 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.601053 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.601106 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.601188 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:43 crc kubenswrapper[4685]: E0129 16:27:43.601522 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:43 crc kubenswrapper[4685]: E0129 16:27:43.601583 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:43 crc kubenswrapper[4685]: E0129 16:27:43.601658 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.610900 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 21:06:30.765416009 +0000 UTC Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.615571 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3
ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.625545 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0ba8e52-a78e-4f60-832d-3455014b1274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a34d7de6add2aa924963aa3027487dfb44041c7b87a8a437e35bd5da275fc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.646078 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.664943 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.679864 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.681257 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.681315 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.681326 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.681345 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.681355 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.697378 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.714678 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.725044 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.739894 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.754312 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.784449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.784512 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.784529 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.784561 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.784592 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.788270 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.805668 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.822916 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.845960 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.865741 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.885995 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.888247 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.888293 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.888306 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.888324 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.888337 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.903386 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.920246 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.947372 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd
9267c080d9bf0644e0eed4e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598e562f474c6a18fd2678b2f50661802db6511483404b7e5e0acd734a5644fa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:02Z\\\",\\\"message\\\":\\\"teMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-etcd-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:02.334931 6335 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console-operator/metrics]} name:Service_openshift-console-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.88:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ebd4748e-0473-49fb-88ad-83dbb221791a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:33Z\\\",\\\"message\\\":\\\"ule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8441, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8442, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8444, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:33.182796 6729 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF0129 16:27:33.182926 6729 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler 
{0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:43Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.990854 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.990912 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.990928 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.990952 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:43 crc kubenswrapper[4685]: I0129 16:27:43.990968 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:43Z","lastTransitionTime":"2026-01-29T16:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.094806 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.094868 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.094886 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.094916 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.094934 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.198326 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.198387 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.198432 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.198455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.198477 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.301747 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.301810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.301829 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.301858 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.301876 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.405239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.405295 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.405312 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.405335 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.405352 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.507897 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.507937 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.507950 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.507968 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.507980 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.600714 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:44 crc kubenswrapper[4685]: E0129 16:27:44.600870 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.610861 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.610930 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.610962 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.610993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.610988 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 13:18:46.319739723 +0000 UTC Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.611018 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.714297 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.714429 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.714456 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.714491 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.714521 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.817893 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.817968 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.817986 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.818011 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.818032 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.921607 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.921690 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.921714 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.921746 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:44 crc kubenswrapper[4685]: I0129 16:27:44.921765 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:44Z","lastTransitionTime":"2026-01-29T16:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.025024 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.025094 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.025112 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.025138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.025154 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.129029 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.129111 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.129134 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.129163 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.129185 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.232863 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.232927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.232945 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.232977 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.232993 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.335654 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.335729 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.335781 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.335809 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.335827 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.438879 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.438951 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.438974 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.439001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.439018 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.542179 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.542268 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.542292 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.542323 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.542349 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.601589 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.601678 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.601621 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:45 crc kubenswrapper[4685]: E0129 16:27:45.601782 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:45 crc kubenswrapper[4685]: E0129 16:27:45.601913 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:45 crc kubenswrapper[4685]: E0129 16:27:45.602119 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.611129 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 16:57:48.242678484 +0000 UTC Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.645013 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.645066 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.645079 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.645098 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.645111 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.748552 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.748620 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.748642 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.748670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.748691 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.851511 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.851559 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.851569 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.851584 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.851593 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.954744 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.954812 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.954830 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.954853 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:45 crc kubenswrapper[4685]: I0129 16:27:45.955023 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:45Z","lastTransitionTime":"2026-01-29T16:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.058709 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.058792 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.058812 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.058843 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.058863 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.162045 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.162103 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.162121 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.162145 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.162162 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.267632 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.267689 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.267707 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.267757 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.267775 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.371183 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.371273 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.371295 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.371321 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.371338 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.474435 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.474494 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.474511 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.474537 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.474557 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.577138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.577204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.577225 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.577251 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.577269 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.601694 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:46 crc kubenswrapper[4685]: E0129 16:27:46.601889 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.612021 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 05:09:49.559561189 +0000 UTC Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.680718 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.680872 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.680894 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.680921 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.680938 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.784270 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.784336 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.784360 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.784431 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.784457 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.887617 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.887698 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.887720 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.887749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.887769 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.991196 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.991255 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.991277 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.991305 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:46 crc kubenswrapper[4685]: I0129 16:27:46.991322 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:46Z","lastTransitionTime":"2026-01-29T16:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.095607 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.095677 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.095704 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.095736 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.095760 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.199087 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.199150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.199169 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.199197 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.199216 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.302439 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.302517 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.302540 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.302566 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.302583 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.405472 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.405533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.405551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.405578 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.405595 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.508324 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.508372 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.508387 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.508421 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.508432 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.600773 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.600814 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:47 crc kubenswrapper[4685]: E0129 16:27:47.601010 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.601028 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:47 crc kubenswrapper[4685]: E0129 16:27:47.601352 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:47 crc kubenswrapper[4685]: E0129 16:27:47.601923 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.611216 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.611270 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.611288 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.611310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.611328 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.612376 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 01:02:46.286963465 +0000 UTC Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.714345 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.714440 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.714459 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.714484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.714502 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.818273 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.818325 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.818346 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.818372 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.818421 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.921073 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.921150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.921160 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.921178 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:47 crc kubenswrapper[4685]: I0129 16:27:47.921189 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:47Z","lastTransitionTime":"2026-01-29T16:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.023566 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.023606 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.023615 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.023630 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.023638 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.126547 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.126600 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.126613 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.126636 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.126651 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.230708 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.230777 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.230793 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.230815 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.230828 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.334288 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.334336 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.334349 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.334372 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.334384 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.437528 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.437601 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.437622 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.437650 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.437671 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.541596 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.541671 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.541684 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.541715 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.541747 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.602094 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:48 crc kubenswrapper[4685]: E0129 16:27:48.603064 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.603619 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:27:48 crc kubenswrapper[4685]: E0129 16:27:48.603938 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.613727 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 10:04:26.145385764 +0000 UTC Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.620175 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.634970 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.646309 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.646425 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.646446 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.646475 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.646494 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.649460 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"start
Time\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.665279 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.681910 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.700025 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.715103 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.729471 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.740886 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.748767 4685 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.748823 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.748840 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.748864 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.748883 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.764355 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.779664 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.790703 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.802445 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.821826 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd
9267c080d9bf0644e0eed4e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:33Z\\\",\\\"message\\\":\\\"ule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8441, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8442, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8444, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:33.182796 6729 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF0129 16:27:33.182926 6729 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.838358 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.848208 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0ba8e52-a78e-4f60-832d-3455014b1274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a34d7de6add2aa924963aa3027487dfb44041c7b87a8a437e35bd5da275fc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.850953 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.851031 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.851057 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.851089 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.851113 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.859776 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.872232 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.882838 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:48Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.954766 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.954800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.954809 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.954823 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:48 crc kubenswrapper[4685]: I0129 16:27:48.954832 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:48Z","lastTransitionTime":"2026-01-29T16:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.057686 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.057767 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.057778 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.057793 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.057804 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.252780 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.253187 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.253360 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.253561 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.253711 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.359583 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.359651 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.359674 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.359702 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.359725 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.463810 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.463874 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.463891 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.463918 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.463937 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.567530 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.567665 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.567687 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.567712 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.567729 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.601270 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.601328 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.601288 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:49 crc kubenswrapper[4685]: E0129 16:27:49.601533 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:49 crc kubenswrapper[4685]: E0129 16:27:49.601647 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:49 crc kubenswrapper[4685]: E0129 16:27:49.601800 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.614182 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 07:35:45.755010928 +0000 UTC Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.670963 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.671022 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.671039 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.671066 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.671083 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.773742 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.773790 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.773807 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.773830 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.773847 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.876793 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.876862 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.876880 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.876905 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.876922 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.979862 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.979921 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.979939 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.979965 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:49 crc kubenswrapper[4685]: I0129 16:27:49.979983 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:49Z","lastTransitionTime":"2026-01-29T16:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.082990 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.083106 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.083133 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.083165 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.083186 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.185488 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.185560 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.185584 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.185615 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.185640 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.288944 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.289028 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.289052 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.289082 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.289103 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.392489 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.392553 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.392572 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.392598 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.392617 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.496473 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.496521 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.496533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.496551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.496565 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.598722 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.598771 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.598783 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.598801 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.598815 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.600817 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:50 crc kubenswrapper[4685]: E0129 16:27:50.601094 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.615129 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 16:13:07.414107894 +0000 UTC Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.703523 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.703561 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.703571 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.703585 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.703596 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.806960 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.807004 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.807015 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.807031 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.807042 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.909717 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.909777 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.909790 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.909807 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:50 crc kubenswrapper[4685]: I0129 16:27:50.909819 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:50Z","lastTransitionTime":"2026-01-29T16:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.012792 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.012827 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.012853 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.012868 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.012879 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.115989 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.116053 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.116070 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.116100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.116118 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.219545 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.219611 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.219630 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.219655 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.219677 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.322248 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.322293 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.322302 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.322318 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.322329 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.426927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.427032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.427050 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.427074 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.427091 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.517611 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.517664 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.517684 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.517708 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.517726 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.537125 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:51Z is after 
2025-08-24T17:21:41Z" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.542314 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.542422 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.542444 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.542468 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.542488 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.563525 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}] [... images, nodeInfo and runtimeHandlers fields identical to the earlier attempt above ...]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:51Z is after 
2025-08-24T17:21:41Z" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.568594 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.568708 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.568770 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.568799 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.568820 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.592278 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}] [... images, nodeInfo and runtimeHandlers fields identical to the earlier attempt above ...]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:51Z is after 
2025-08-24T17:21:41Z" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.597346 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.597427 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.597445 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.597470 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.597488 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.600909 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.600953 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.601203 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.601284 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.601456 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.601582 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.616593 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 05:33:25.261660885 +0000 UTC Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.616800 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}] [... images, nodeInfo and runtimeHandlers fields identical to the earlier attempt above ...]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:51Z is after 
2025-08-24T17:21:41Z" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.620434 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.620477 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.620493 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.620515 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.620532 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.640724 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c732ad58-05ea-4427-9404-d852b5b303c0\\\",\\\"systemUUID\\\":\\\"ecca5e7b-6a38-45e2-ba57-5071f788384c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:51Z is after 
2025-08-24T17:21:41Z" Jan 29 16:27:51 crc kubenswrapper[4685]: E0129 16:27:51.641048 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.643236 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.643277 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.643289 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.643307 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.643322 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.746430 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.746481 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.746570 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.746590 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.746602 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.851081 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.851214 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.851229 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.851250 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.851264 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.954008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.954063 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.954076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.954097 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:51 crc kubenswrapper[4685]: I0129 16:27:51.954110 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:51Z","lastTransitionTime":"2026-01-29T16:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.056492 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.056840 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.056922 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.057022 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.057103 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.160635 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.160669 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.160679 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.160694 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.160702 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.262795 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.262849 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.262873 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.262900 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.262919 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.366386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.366451 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.366460 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.366475 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.366520 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.469573 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.469617 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.469628 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.469647 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.469658 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.572565 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.572627 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.572648 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.572675 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.572698 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.601459 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:52 crc kubenswrapper[4685]: E0129 16:27:52.602478 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.616990 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 13:33:40.33161776 +0000 UTC Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.675678 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.675718 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.675726 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.675740 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.675750 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.778548 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.778607 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.778617 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.778635 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.778647 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.881713 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.881769 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.881785 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.881807 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.881822 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.984961 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.985006 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.985038 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.985053 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:52 crc kubenswrapper[4685]: I0129 16:27:52.985064 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:52Z","lastTransitionTime":"2026-01-29T16:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.087206 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.087250 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.087259 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.087275 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.087298 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.190460 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.190521 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.190534 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.190551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.190563 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.292551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.292610 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.292627 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.292646 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.292660 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.394520 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.394612 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.394639 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.394666 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.394685 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.497276 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.497315 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.497325 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.497339 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.497348 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.599935 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.599987 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.600003 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.600021 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.600042 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.601138 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.601142 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:53 crc kubenswrapper[4685]: E0129 16:27:53.601283 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:53 crc kubenswrapper[4685]: E0129 16:27:53.601224 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.601308 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:53 crc kubenswrapper[4685]: E0129 16:27:53.601562 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.617126 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 13:51:17.358936404 +0000 UTC Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.621644 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06f50dca-8527-4a1d-b8c4-0ba3704a4fd1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81e75a7dd7a65733448875221531df7dcc4d1d8be8acae1be4ccbe34ce302857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://b12a99b3dcb33f60b5a79246b3a235b6121aa52f62b7db4e9fd18bc00d0da5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://695bb2d9ed7875aada35539cf7d8c9267f527550da5947f144fe1d0626b1c842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efa78f7f721e3d59f2550a3910216a256db8f4acac6a5a507daa9fe74119160e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91c414bc63394efa330509c129a2e80dec3a5e5e2a145350eb26b5c8aa119f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9
bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f18bd910285a3e92d31170af81f9bf642f6307f6aec462d5ed1063e724bfd99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a66552b562962621dedbf33fcf0e27576bfc31328bba0f7a82f6b49e8dc3ce6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://125f165c7559451c6aad53c3d61dbfd3d0ce8626b864626a8dd5ecc7d226468a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.632584 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.644968 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb6a45c9e51a507d1123367482a7a146f38beae4142c8f0fcbb68d038bfcfea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.654456 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6ceb996f-40db-4a5d-915d-dd0f6d926751\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bdb1a0edac38f77e7e5ccdd350af32458bd5ca5695ede6a0d1960d2889e0359f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cnwhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbcwf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.675940 4685 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a471f311-f227-4d4c-aed3-e783d2ef39bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b775eff4c49cc99a6a1a3d451e4806417c2a2096f56f9a1a1461d3340ebca1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2790c34df30ad05f64347ff9ec7a435532c79cd083f5df3c5fc221951499fe4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d3c6a86c2f5874b4607328e80f
88d05fb8db320e2dadc19b0e74c6d610f2d94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.689195 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c63955b5a3fcb798ec05637b83fbbbbebda42b268555a065660ba097bef106\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.701998 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.702034 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.702042 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.702058 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.702067 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.707002 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.720907 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c427717afd1726de77e6f0e7abd5596806443e07814a22dc931318606d51427f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99bc22ed35f80347fc8810939afa16cd98c717d9bdc380f389819921070b1289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.740685 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00a79737-7411-4a3e-8c27-e1e16951a730\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd
9267c080d9bf0644e0eed4e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:33Z\\\",\\\"message\\\":\\\"ule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8441, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8442, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.167\\\\\\\", Port:8444, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0129 16:27:33.182796 6729 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF0129 16:27:33.182926 6729 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:27:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jskgn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pjcfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.753108 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4d558ed-2b92-4611-afae-4fd0d528cd48\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05c5e27ad0769267313696bd4690ccf686fef58698e1e4f86a6da5a01c491007\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a749d51a68b86eaed7f062c374d40c11aa85c820bea63a31084cc1a9a966c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8808d7b20e746252c0ab33653ede3a739bebab0ced8846bea9ddfae58bc7d30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6803da857a1f8aa584a5104eb634b144d9d1dad8c4955a4aab65d874e76a5235\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.764865 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0ba8e52-a78e-4f60-832d-3455014b1274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a34d7de6add2aa924963aa3027487dfb44041c7b87a8a437e35bd5da275fc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aca582daeec2d464f34634612ec8059cb35f751a4bad85e0f07af7bf13d77b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.777659 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8dlmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c6b72-980a-4110-b259-c4b54f0ea9b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-29T16:27:21Z\\\",\\\"message\\\":\\\"2026-01-29T16:26:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836\\\\n2026-01-29T16:26:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c366a053-31a5-41f6-91e0-27f56f3cc836 to /host/opt/cni/bin/\\\\n2026-01-29T16:26:36Z [verbose] multus-daemon started\\\\n2026-01-29T16:26:36Z [verbose] Readiness Indicator file check\\\\n2026-01-29T16:27:21Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:27:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tlcvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8dlmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.790843 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cf07450-4dbf-44c7-a654-3ea7347ac7ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9239c6665191bc72fda446b4967559135392d1f4726617fefd3780d7ab8ac693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb56607d3c5e35963955867c048a72c26859fa1abdc224a158c031ad794237ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b05a845e0654bf870859bb17c8bc567e5a9e76e479dd8e800b61911f8dd92dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://389bee1fa583318694996eab0982833f56e2096c136a4bd11251520f0e3b6211\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://787fbb6fbe18804357513c47a84096e0382571c3aa77ead3ca13c5b18f4bc5c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://359e7c98e5616f73a3e5b8b9360005f879389f92f7c7743d418d745b58732597\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de1b2c745579c0eb42328d17d03a413b3f10a489908bea1f3e5bbd27b30e9af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjffv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pxv5d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.805166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.805316 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc 
kubenswrapper[4685]: I0129 16:27:53.805340 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.805386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.805440 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.805546 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4635678-889c-4e33-82fc-fb316db41117\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"le observer\\\\nW0129 16:26:33.529848 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0129 16:26:33.530024 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0129 16:26:33.531104 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1472341609/tls.crt::/tmp/serving-cert-1472341609/tls.key\\\\\\\"\\\\nI0129 16:26:33.869693 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0129 16:26:33.884707 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0129 16:26:33.885493 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0129 16:26:33.885539 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0129 16:26:33.885545 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0129 16:26:33.895081 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0129 16:26:33.895116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895122 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0129 16:26:33.895139 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0129 16:26:33.895144 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0129 16:26:33.895152 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0129 16:26:33.895155 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0129 16:26:33.896705 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0129 16:26:33.899766 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-29T16:26:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-29T16:26:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:13Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.819035 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.829426 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-qlnx7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2ab808a-6378-45c6-98a8-f48acf2d9235\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60c4c78180666f8d294d9ab9cc7a5fdfd70d7657be344a9c1ea5c0252b483ecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjd4g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-qlnx7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.841531 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-r99mt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0319ef8a-068b-4985-a398-8648207e77a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d4cd943e32af29ccf5a4049dbbfafa316a9d5927d11e84153c8947f0f78ae11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7dcxr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-r99mt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.853874 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpctp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e2de4c-c48c-4160-87df-b5a8d213a203\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dqclw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpctp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.876991 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65c9dddd-ada6-4134-af1e-d725cc23c354\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-29T16:26:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d90e355e6f4da8e90a710d220c99fa6a46fedb2418a1e2468b781ac20898ba1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a90834b39dbe43a6f74982513aa1aa1ea93f5febc660f51088920cbab9acddcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-29T16:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82rbs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-29T16:26:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xgsh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-29T16:27:53Z is after 2025-08-24T17:21:41Z" Jan 29 
16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.909087 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.909159 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.909177 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.909204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:53 crc kubenswrapper[4685]: I0129 16:27:53.909224 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:53Z","lastTransitionTime":"2026-01-29T16:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.012274 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.012360 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.012380 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.012452 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.012473 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.115653 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.115721 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.115732 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.115749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.115761 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.218929 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.219006 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.219026 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.219055 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.219074 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.322932 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.323032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.323052 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.323078 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.323094 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.425665 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.425733 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.425751 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.425775 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.425794 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.528358 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.528424 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.528435 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.528449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.528459 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.601238 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:54 crc kubenswrapper[4685]: E0129 16:27:54.601549 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.617680 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 05:01:43.908068863 +0000 UTC Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.631113 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.631178 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.631198 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.631222 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.631245 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.734090 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.734148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.734161 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.734178 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.734192 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.837727 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.837800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.837820 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.837849 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.837869 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.941763 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.941824 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.941839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.941866 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:54 crc kubenswrapper[4685]: I0129 16:27:54.941882 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:54Z","lastTransitionTime":"2026-01-29T16:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.046309 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.046425 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.046445 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.046476 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.046496 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.149714 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.149776 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.149791 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.149815 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.149831 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.252999 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.253058 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.253077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.253102 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.253120 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.356670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.356794 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.356818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.356888 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.356922 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.460228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.460289 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.460310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.460335 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.460354 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.563061 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.563123 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.563145 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.563170 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.563191 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.601247 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.601339 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.601492 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:55 crc kubenswrapper[4685]: E0129 16:27:55.601481 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:55 crc kubenswrapper[4685]: E0129 16:27:55.601654 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:55 crc kubenswrapper[4685]: E0129 16:27:55.601788 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.618720 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 12:19:44.988185568 +0000 UTC Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.666181 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.666229 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.666239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.666256 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.666267 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.769277 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.769356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.769374 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.769557 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.769579 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.871963 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.872034 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.872054 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.872078 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.872095 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.974787 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.974851 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.974868 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.974893 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:55 crc kubenswrapper[4685]: I0129 16:27:55.974912 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:55Z","lastTransitionTime":"2026-01-29T16:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.084071 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.084135 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.084154 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.084185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.084208 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.187435 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.187479 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.187488 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.187505 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.187516 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.290037 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.290089 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.290100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.290117 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.290130 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.393833 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.393883 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.393920 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.393941 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.393956 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.496715 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.496778 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.496795 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.496821 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.496838 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.600457 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.600510 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.600527 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.600549 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.600562 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.600685 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:56 crc kubenswrapper[4685]: E0129 16:27:56.600870 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.619387 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 17:41:44.369330949 +0000 UTC Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.704159 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.704221 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.704233 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.704253 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.704266 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.808785 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.808867 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.808887 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.808912 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.808931 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.911879 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.911958 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.911973 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.912001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:56 crc kubenswrapper[4685]: I0129 16:27:56.912018 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:56Z","lastTransitionTime":"2026-01-29T16:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.015112 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.015173 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.015199 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.015232 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.015255 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.118137 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.118195 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.118212 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.118235 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.118250 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.221582 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.221643 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.221670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.221695 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.221713 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.324880 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.324971 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.324999 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.325030 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.325060 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.427882 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.427955 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.427981 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.428013 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.428041 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.531133 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.531232 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.531263 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.531296 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.531320 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.600972 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.601053 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:57 crc kubenswrapper[4685]: E0129 16:27:57.601174 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.601261 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:57 crc kubenswrapper[4685]: E0129 16:27:57.601553 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:57 crc kubenswrapper[4685]: E0129 16:27:57.601691 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.620139 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 06:54:50.958820014 +0000 UTC Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.634603 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.634655 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.634676 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.634700 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.634720 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.737259 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.737291 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.737301 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.737315 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.737325 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.840749 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.840807 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.840826 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.840852 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.840873 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.944416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.944482 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.944523 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.944558 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:57 crc kubenswrapper[4685]: I0129 16:27:57.944582 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:57Z","lastTransitionTime":"2026-01-29T16:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.047552 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.047620 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.047641 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.047664 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.047681 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.151572 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.151633 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.151649 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.151670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.151695 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.254832 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.254891 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.254905 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.254929 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.254944 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.358006 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.358055 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.358072 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.358093 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.358106 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.460791 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.460829 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.460839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.460852 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.460863 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.563839 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.563892 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.563905 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.563923 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.563937 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.601480 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:27:58 crc kubenswrapper[4685]: E0129 16:27:58.601878 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.620805 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 18:01:44.453818371 +0000 UTC Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.666746 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.667042 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.667113 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.667177 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.667364 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.772924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.772980 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.772992 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.773011 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.773025 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.876150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.876216 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.876232 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.876259 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.876275 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.979867 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.979930 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.979950 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.979977 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:58 crc kubenswrapper[4685]: I0129 16:27:58.980001 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:58Z","lastTransitionTime":"2026-01-29T16:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.084220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.084317 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.084347 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.084385 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.084480 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.187607 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.187683 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.187714 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.187748 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.187775 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.290151 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.290228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.290243 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.290265 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.290281 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.393705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.393779 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.393798 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.393826 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.393847 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.497729 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.497859 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.497890 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.497927 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.497947 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.600966 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.601094 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:27:59 crc kubenswrapper[4685]: E0129 16:27:59.601131 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:27:59 crc kubenswrapper[4685]: E0129 16:27:59.601304 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.601735 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.601823 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.601851 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.601883 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.601904 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.602167 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:27:59 crc kubenswrapper[4685]: E0129 16:27:59.602358 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.620979 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 09:05:26.296364051 +0000 UTC Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.703977 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.704029 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.704046 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.704068 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.704084 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.807045 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.807148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.807179 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.807219 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.807245 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.910526 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.910625 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.910648 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.910674 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:27:59 crc kubenswrapper[4685]: I0129 16:27:59.910692 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:27:59Z","lastTransitionTime":"2026-01-29T16:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.013974 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.014096 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.014116 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.014141 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.014168 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.116793 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.116860 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.116881 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.116904 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.116925 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.219919 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.219972 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.219985 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.220006 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.220022 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.323031 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.323073 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.323084 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.323099 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.323110 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.425871 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.425925 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.425934 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.425949 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.425998 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.528308 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.528345 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.528353 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.528366 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.528376 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.601127 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:00 crc kubenswrapper[4685]: E0129 16:28:00.601313 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.621452 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 19:43:41.421572686 +0000 UTC Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.630414 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.630464 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.630476 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.630490 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.630499 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.733194 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.733251 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.733260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.733273 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.733283 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.836028 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.836070 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.836080 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.836096 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.836108 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.938281 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.938330 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.938344 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.938361 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:00 crc kubenswrapper[4685]: I0129 16:28:00.938374 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:00Z","lastTransitionTime":"2026-01-29T16:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.041756 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.041818 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.041836 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.041859 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.041877 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.145115 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.145356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.145367 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.145382 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.145406 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.248254 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.248318 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.248336 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.248364 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.248382 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.351559 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.351623 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.351639 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.351660 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.351674 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.456076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.456132 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.456146 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.456162 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.456373 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.560123 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.560188 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.560204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.560221 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.560233 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.601014 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.601066 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.601019 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:01 crc kubenswrapper[4685]: E0129 16:28:01.601189 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:01 crc kubenswrapper[4685]: E0129 16:28:01.601279 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:01 crc kubenswrapper[4685]: E0129 16:28:01.601357 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.622110 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:43:42.567188332 +0000 UTC Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.663133 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.663227 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.663239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.663259 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.663274 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.712418 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.712467 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.712478 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.712513 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.712531 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-29T16:28:01Z","lastTransitionTime":"2026-01-29T16:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.764926 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln"] Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.765412 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.767827 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.767878 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.767965 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.768005 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.797973 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=87.797940382 podStartE2EDuration="1m27.797940382s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:01.797361118 +0000 UTC m=+108.994717410" watchObservedRunningTime="2026-01-29 16:28:01.797940382 +0000 UTC m=+108.995296644" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.834478 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.834681 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.836473 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.836693 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.836763 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.853305 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podStartSLOduration=88.853283594 podStartE2EDuration="1m28.853283594s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:01.840258434 +0000 UTC m=+109.037614706" watchObservedRunningTime="2026-01-29 16:28:01.853283594 +0000 UTC m=+109.050639856" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.869336 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=86.8693201 podStartE2EDuration="1m26.8693201s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:01.854021762 +0000 UTC m=+109.051378074" watchObservedRunningTime="2026-01-29 16:28:01.8693201 +0000 UTC m=+109.066676372" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.937324 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.937374 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-etc-cvo-updatepayloads\") pod 
\"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.937429 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.937459 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.937473 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.937567 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.937632 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.938552 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.945082 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.948275 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=60.948256519 podStartE2EDuration="1m0.948256519s" podCreationTimestamp="2026-01-29 16:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2026-01-29 16:28:01.947260314 +0000 UTC m=+109.144616586" watchObservedRunningTime="2026-01-29 16:28:01.948256519 +0000 UTC m=+109.145612791" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.956582 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe9fdc6f-d026-4c42-ad65-c2815c6bfa06-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bslln\" (UID: \"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.968749 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=37.968732807 podStartE2EDuration="37.968732807s" podCreationTimestamp="2026-01-29 16:27:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:01.957031571 +0000 UTC m=+109.154387843" watchObservedRunningTime="2026-01-29 16:28:01.968732807 +0000 UTC m=+109.166089069" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.982554 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-pxv5d" podStartSLOduration=88.982531846 podStartE2EDuration="1m28.982531846s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:01.982503516 +0000 UTC m=+109.179859798" watchObservedRunningTime="2026-01-29 16:28:01.982531846 +0000 UTC m=+109.179888128" Jan 29 16:28:01 crc kubenswrapper[4685]: I0129 16:28:01.983231 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-8dlmj" podStartSLOduration=88.983225304 podStartE2EDuration="1m28.983225304s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:01.968545592 +0000 UTC m=+109.165901864" watchObservedRunningTime="2026-01-29 16:28:01.983225304 +0000 UTC m=+109.180581586" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.009847 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.009830118 podStartE2EDuration="1m28.009830118s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:02.008888504 +0000 UTC m=+109.206244786" watchObservedRunningTime="2026-01-29 16:28:02.009830118 +0000 UTC m=+109.207186380" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.061426 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-qlnx7" podStartSLOduration=89.061408213 podStartE2EDuration="1m29.061408213s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:02.061197188 +0000 UTC m=+109.258553460" watchObservedRunningTime="2026-01-29 16:28:02.061408213 +0000 UTC m=+109.258764475" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.081706 4685 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-r99mt" podStartSLOduration=89.081687476 podStartE2EDuration="1m29.081687476s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:02.081453441 +0000 UTC m=+109.278809713" watchObservedRunningTime="2026-01-29 16:28:02.081687476 +0000 UTC m=+109.279043738" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.082566 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.298384 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" event={"ID":"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06","Type":"ContainerStarted","Data":"9395df384ab3770545a3314364f9e58f8ede639c7441b6e6e22b7d976a0b3277"} Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.298843 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" event={"ID":"fe9fdc6f-d026-4c42-ad65-c2815c6bfa06","Type":"ContainerStarted","Data":"7e397ff691de1a084ced18b5d97bfd082934dda418b8cd39643b63bb8975ddf4"} Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.311211 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bslln" podStartSLOduration=88.311188627 podStartE2EDuration="1m28.311188627s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:02.310530671 +0000 UTC m=+109.507886983" watchObservedRunningTime="2026-01-29 16:28:02.311188627 +0000 UTC m=+109.508544899" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.311721 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xgsh5" podStartSLOduration=87.311714911 podStartE2EDuration="1m27.311714911s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:02.116196671 +0000 UTC m=+109.313552933" watchObservedRunningTime="2026-01-29 16:28:02.311714911 +0000 UTC m=+109.509071193" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.601669 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:02 crc kubenswrapper[4685]: E0129 16:28:02.601810 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.602372 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:28:02 crc kubenswrapper[4685]: E0129 16:28:02.602590 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-pjcfc_openshift-ovn-kubernetes(00a79737-7411-4a3e-8c27-e1e16951a730)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.622994 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 00:15:38.494977225 +0000 UTC Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.623094 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 29 16:28:02 crc kubenswrapper[4685]: I0129 16:28:02.632620 4685 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 29 16:28:03 crc kubenswrapper[4685]: I0129 16:28:03.600882 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:03 crc kubenswrapper[4685]: I0129 16:28:03.600882 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:03 crc kubenswrapper[4685]: I0129 16:28:03.600901 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:03 crc kubenswrapper[4685]: E0129 16:28:03.601846 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:03 crc kubenswrapper[4685]: E0129 16:28:03.601927 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:03 crc kubenswrapper[4685]: E0129 16:28:03.602023 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:04 crc kubenswrapper[4685]: I0129 16:28:04.601294 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:04 crc kubenswrapper[4685]: E0129 16:28:04.602557 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:05 crc kubenswrapper[4685]: I0129 16:28:05.601293 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:05 crc kubenswrapper[4685]: I0129 16:28:05.601375 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:05 crc kubenswrapper[4685]: I0129 16:28:05.601311 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:05 crc kubenswrapper[4685]: E0129 16:28:05.601492 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:05 crc kubenswrapper[4685]: E0129 16:28:05.601648 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:05 crc kubenswrapper[4685]: E0129 16:28:05.601691 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:06 crc kubenswrapper[4685]: I0129 16:28:06.600838 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:06 crc kubenswrapper[4685]: E0129 16:28:06.601017 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:07 crc kubenswrapper[4685]: I0129 16:28:07.601028 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:07 crc kubenswrapper[4685]: I0129 16:28:07.601116 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:07 crc kubenswrapper[4685]: I0129 16:28:07.601224 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:07 crc kubenswrapper[4685]: E0129 16:28:07.601221 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:07 crc kubenswrapper[4685]: E0129 16:28:07.601469 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:07 crc kubenswrapper[4685]: E0129 16:28:07.601786 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:08 crc kubenswrapper[4685]: I0129 16:28:08.326523 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/1.log" Jan 29 16:28:08 crc kubenswrapper[4685]: I0129 16:28:08.327759 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/0.log" Jan 29 16:28:08 crc kubenswrapper[4685]: I0129 16:28:08.327856 4685 generic.go:334] "Generic (PLEG): container finished" podID="d75c6b72-980a-4110-b259-c4b54f0ea9b2" containerID="d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11" exitCode=1 Jan 29 16:28:08 crc kubenswrapper[4685]: I0129 16:28:08.327903 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerDied","Data":"d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11"} Jan 29 16:28:08 crc kubenswrapper[4685]: I0129 16:28:08.327954 4685 scope.go:117] "RemoveContainer" containerID="8630a90a645305540be0c67879e22b7ec9f232f4e189e128be3e410e1805e83f" Jan 29 16:28:08 crc kubenswrapper[4685]: I0129 16:28:08.328696 4685 scope.go:117] "RemoveContainer" containerID="d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11" Jan 29 16:28:08 crc kubenswrapper[4685]: E0129 16:28:08.328999 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-8dlmj_openshift-multus(d75c6b72-980a-4110-b259-c4b54f0ea9b2)\"" pod="openshift-multus/multus-8dlmj" podUID="d75c6b72-980a-4110-b259-c4b54f0ea9b2" Jan 29 16:28:08 crc kubenswrapper[4685]: I0129 16:28:08.601138 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:08 crc kubenswrapper[4685]: E0129 16:28:08.601288 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:09 crc kubenswrapper[4685]: I0129 16:28:09.331990 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/1.log" Jan 29 16:28:09 crc kubenswrapper[4685]: I0129 16:28:09.601134 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:09 crc kubenswrapper[4685]: E0129 16:28:09.601264 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:09 crc kubenswrapper[4685]: I0129 16:28:09.601319 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:09 crc kubenswrapper[4685]: I0129 16:28:09.601319 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:09 crc kubenswrapper[4685]: E0129 16:28:09.601466 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:09 crc kubenswrapper[4685]: E0129 16:28:09.601490 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:10 crc kubenswrapper[4685]: I0129 16:28:10.601120 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:10 crc kubenswrapper[4685]: E0129 16:28:10.601285 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:11 crc kubenswrapper[4685]: I0129 16:28:11.601727 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:11 crc kubenswrapper[4685]: I0129 16:28:11.601795 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:11 crc kubenswrapper[4685]: I0129 16:28:11.601907 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:11 crc kubenswrapper[4685]: E0129 16:28:11.602022 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:11 crc kubenswrapper[4685]: E0129 16:28:11.602179 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:11 crc kubenswrapper[4685]: E0129 16:28:11.602430 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:12 crc kubenswrapper[4685]: I0129 16:28:12.601533 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:12 crc kubenswrapper[4685]: E0129 16:28:12.601680 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:13 crc kubenswrapper[4685]: E0129 16:28:13.594672 4685 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 29 16:28:13 crc kubenswrapper[4685]: I0129 16:28:13.601025 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:13 crc kubenswrapper[4685]: I0129 16:28:13.601067 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:13 crc kubenswrapper[4685]: E0129 16:28:13.602241 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:13 crc kubenswrapper[4685]: I0129 16:28:13.602290 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:13 crc kubenswrapper[4685]: E0129 16:28:13.602528 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:13 crc kubenswrapper[4685]: E0129 16:28:13.602560 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:13 crc kubenswrapper[4685]: E0129 16:28:13.705869 4685 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 29 16:28:14 crc kubenswrapper[4685]: I0129 16:28:14.601301 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:14 crc kubenswrapper[4685]: E0129 16:28:14.601628 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:15 crc kubenswrapper[4685]: I0129 16:28:15.601143 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:15 crc kubenswrapper[4685]: E0129 16:28:15.601271 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:15 crc kubenswrapper[4685]: I0129 16:28:15.601514 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:15 crc kubenswrapper[4685]: E0129 16:28:15.601573 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:15 crc kubenswrapper[4685]: I0129 16:28:15.601716 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:15 crc kubenswrapper[4685]: E0129 16:28:15.601859 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:16 crc kubenswrapper[4685]: I0129 16:28:16.601094 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:16 crc kubenswrapper[4685]: E0129 16:28:16.601262 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:17 crc kubenswrapper[4685]: I0129 16:28:17.600863 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:17 crc kubenswrapper[4685]: I0129 16:28:17.600914 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:17 crc kubenswrapper[4685]: I0129 16:28:17.600942 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:17 crc kubenswrapper[4685]: E0129 16:28:17.601019 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:17 crc kubenswrapper[4685]: E0129 16:28:17.601215 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:17 crc kubenswrapper[4685]: E0129 16:28:17.601667 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:17 crc kubenswrapper[4685]: I0129 16:28:17.601945 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:28:18 crc kubenswrapper[4685]: I0129 16:28:18.360414 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/3.log" Jan 29 16:28:18 crc kubenswrapper[4685]: I0129 16:28:18.362808 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerStarted","Data":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} Jan 29 16:28:18 crc kubenswrapper[4685]: I0129 16:28:18.363339 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:28:18 crc kubenswrapper[4685]: I0129 16:28:18.405579 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podStartSLOduration=105.405564125 podStartE2EDuration="1m45.405564125s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:18.405034461 +0000 UTC m=+125.602390773" watchObservedRunningTime="2026-01-29 16:28:18.405564125 +0000 UTC m=+125.602920377" Jan 29 16:28:18 crc kubenswrapper[4685]: I0129 16:28:18.513500 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rpctp"] Jan 29 16:28:18 crc kubenswrapper[4685]: I0129 16:28:18.513670 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:18 crc kubenswrapper[4685]: E0129 16:28:18.513850 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:18 crc kubenswrapper[4685]: E0129 16:28:18.708827 4685 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 29 16:28:19 crc kubenswrapper[4685]: I0129 16:28:19.601674 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:19 crc kubenswrapper[4685]: I0129 16:28:19.601724 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:19 crc kubenswrapper[4685]: I0129 16:28:19.601803 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:19 crc kubenswrapper[4685]: E0129 16:28:19.602245 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:19 crc kubenswrapper[4685]: E0129 16:28:19.603604 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:19 crc kubenswrapper[4685]: E0129 16:28:19.603982 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:20 crc kubenswrapper[4685]: I0129 16:28:20.601550 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:20 crc kubenswrapper[4685]: E0129 16:28:20.601696 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:21 crc kubenswrapper[4685]: I0129 16:28:21.601185 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:21 crc kubenswrapper[4685]: I0129 16:28:21.601233 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:21 crc kubenswrapper[4685]: I0129 16:28:21.601206 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:21 crc kubenswrapper[4685]: E0129 16:28:21.601344 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:21 crc kubenswrapper[4685]: E0129 16:28:21.601503 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:21 crc kubenswrapper[4685]: E0129 16:28:21.601801 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:22 crc kubenswrapper[4685]: I0129 16:28:22.601483 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:22 crc kubenswrapper[4685]: I0129 16:28:22.601950 4685 scope.go:117] "RemoveContainer" containerID="d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11" Jan 29 16:28:22 crc kubenswrapper[4685]: E0129 16:28:22.601990 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:23 crc kubenswrapper[4685]: I0129 16:28:23.381581 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/1.log" Jan 29 16:28:23 crc kubenswrapper[4685]: I0129 16:28:23.381665 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerStarted","Data":"7e01261b8932f8d05469e1ffa919449dd5326d61d65c51b235fa210dfc03f4b3"} Jan 29 16:28:23 crc kubenswrapper[4685]: I0129 16:28:23.600626 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:23 crc kubenswrapper[4685]: I0129 16:28:23.600622 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:23 crc kubenswrapper[4685]: E0129 16:28:23.601791 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:23 crc kubenswrapper[4685]: I0129 16:28:23.601806 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:23 crc kubenswrapper[4685]: E0129 16:28:23.601869 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:23 crc kubenswrapper[4685]: E0129 16:28:23.601920 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:23 crc kubenswrapper[4685]: E0129 16:28:23.709986 4685 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 29 16:28:24 crc kubenswrapper[4685]: I0129 16:28:24.601231 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:24 crc kubenswrapper[4685]: E0129 16:28:24.601648 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:25 crc kubenswrapper[4685]: I0129 16:28:25.601630 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:25 crc kubenswrapper[4685]: I0129 16:28:25.601780 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:25 crc kubenswrapper[4685]: I0129 16:28:25.601640 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:25 crc kubenswrapper[4685]: E0129 16:28:25.601818 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:25 crc kubenswrapper[4685]: E0129 16:28:25.602008 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:25 crc kubenswrapper[4685]: E0129 16:28:25.602194 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:26 crc kubenswrapper[4685]: I0129 16:28:26.601522 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:26 crc kubenswrapper[4685]: E0129 16:28:26.601749 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:27 crc kubenswrapper[4685]: I0129 16:28:27.601560 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:27 crc kubenswrapper[4685]: I0129 16:28:27.601721 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:27 crc kubenswrapper[4685]: E0129 16:28:27.601800 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 29 16:28:27 crc kubenswrapper[4685]: E0129 16:28:27.601721 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 29 16:28:27 crc kubenswrapper[4685]: I0129 16:28:27.601560 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:27 crc kubenswrapper[4685]: E0129 16:28:27.602080 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 29 16:28:28 crc kubenswrapper[4685]: I0129 16:28:28.601012 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:28 crc kubenswrapper[4685]: E0129 16:28:28.601176 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpctp" podUID="28e2de4c-c48c-4160-87df-b5a8d213a203" Jan 29 16:28:29 crc kubenswrapper[4685]: I0129 16:28:29.600725 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:29 crc kubenswrapper[4685]: I0129 16:28:29.600831 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:29 crc kubenswrapper[4685]: I0129 16:28:29.600944 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:29 crc kubenswrapper[4685]: I0129 16:28:29.603328 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 29 16:28:29 crc kubenswrapper[4685]: I0129 16:28:29.606092 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 29 16:28:29 crc kubenswrapper[4685]: I0129 16:28:29.608061 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 29 16:28:29 crc kubenswrapper[4685]: I0129 16:28:29.608074 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 29 16:28:30 crc kubenswrapper[4685]: I0129 16:28:30.601439 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:30 crc kubenswrapper[4685]: I0129 16:28:30.605071 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 29 16:28:30 crc kubenswrapper[4685]: I0129 16:28:30.608786 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.282164 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.320778 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.321240 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.323592 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.324542 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.328693 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hlrxs"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.329716 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.331074 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.331682 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.333196 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gn4d6"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.333557 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.336515 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.336871 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.337326 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.337453 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.337693 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.337787 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338004 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338086 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338254 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338317 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338438 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338582 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338681 4685 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338720 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338839 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338265 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.338443 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.340044 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.340678 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.341501 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.341820 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.342068 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.342134 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.342231 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.342326 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.343660 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.343815 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.344181 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.344493 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.347837 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9cgjg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.348434 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.349635 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-jmf78"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.376944 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.383927 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.384438 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.391048 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.391437 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.391684 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.392095 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.392274 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.392435 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.392446 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.392693 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.392777 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.392920 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.393014 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.393199 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.393454 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.394211 4685 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-dns-operator/dns-operator-744455d44c-5ljxg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.394849 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.395050 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xgmc2"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.395632 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.395790 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.395808 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.395893 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.395975 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396053 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396150 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396278 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396419 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396487 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396610 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396702 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396742 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396735 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396850 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.396988 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.397024 4685 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.397075 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.397111 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.397155 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.397360 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.398375 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.398604 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.398773 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.398915 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.399780 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.400413 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.400662 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401241 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401274 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-audit-policies\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401320 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk9m7\" (UniqueName: \"kubernetes.io/projected/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-kube-api-access-lk9m7\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401345 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/244a8f0b-1588-493c-bc5e-20eba91a40d5-serving-cert\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401372 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gll2\" (UniqueName: \"kubernetes.io/projected/2b96dae6-a630-4803-ac16-b0853fa32542-kube-api-access-8gll2\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401429 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6420c1b6-8ea6-43b7-9957-5282c8b73a76-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401449 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-audit-dir\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401470 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401493 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-serving-cert\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401512 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-serving-cert\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401564 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8e536769-03e0-4bc4-9947-a53d789f5b08-auth-proxy-config\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401587 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z2wk\" (UniqueName: \"kubernetes.io/projected/8e536769-03e0-4bc4-9947-a53d789f5b08-kube-api-access-2z2wk\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401610 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401632 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-config\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401653 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401679 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-image-import-ca\") pod 
\"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401699 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23ad2357-54ad-4029-8391-285a005ce5bf-audit-dir\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401719 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401740 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/244a8f0b-1588-493c-bc5e-20eba91a40d5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401783 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8vvr\" (UniqueName: \"kubernetes.io/projected/16a9553a-1a4c-4b95-a9c8-561696a06ce6-kube-api-access-m8vvr\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401807 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401827 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v28j\" (UniqueName: \"kubernetes.io/projected/23ad2357-54ad-4029-8391-285a005ce5bf-kube-api-access-4v28j\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401849 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-etcd-serving-ca\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401870 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.401892 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16a9553a-1a4c-4b95-a9c8-561696a06ce6-trusted-ca\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402018 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8e536769-03e0-4bc4-9947-a53d789f5b08-machine-approver-tls\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402048 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402091 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn8ms\" (UniqueName: \"kubernetes.io/projected/6420c1b6-8ea6-43b7-9957-5282c8b73a76-kube-api-access-gn8ms\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402188 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6420c1b6-8ea6-43b7-9957-5282c8b73a76-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402223 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-audit\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402242 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402268 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402292 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7n9h\" (UniqueName: \"kubernetes.io/projected/e3f08bb7-dc95-4b73-b849-cee551c94ac7-kube-api-access-l7n9h\") pod \"cluster-samples-operator-665b6dd947-pglkm\" (UID: \"e3f08bb7-dc95-4b73-b849-cee551c94ac7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402306 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e536769-03e0-4bc4-9947-a53d789f5b08-config\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402323 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p9jd\" (UniqueName: \"kubernetes.io/projected/244a8f0b-1588-493c-bc5e-20eba91a40d5-kube-api-access-2p9jd\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402345 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402363 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e3f08bb7-dc95-4b73-b849-cee551c94ac7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-pglkm\" (UID: \"e3f08bb7-dc95-4b73-b849-cee551c94ac7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402381 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b96dae6-a630-4803-ac16-b0853fa32542-audit-dir\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402432 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402450 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402470 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-audit-policies\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402486 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-etcd-client\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402529 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402592 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2b96dae6-a630-4803-ac16-b0853fa32542-node-pullsecrets\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402615 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16a9553a-1a4c-4b95-a9c8-561696a06ce6-serving-cert\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402672 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-etcd-client\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402715 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-encryption-config\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402737 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/16a9553a-1a4c-4b95-a9c8-561696a06ce6-config\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.402760 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-encryption-config\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.405569 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.410431 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.413646 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.415348 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.415792 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.416414 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.419677 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.419825 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbgv8"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.420260 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-hndrh"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.420555 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-hndrh" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.420757 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.423173 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.423350 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.423569 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.423755 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.423916 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424028 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424141 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424253 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424502 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424612 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424724 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424836 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424877 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424945 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.425043 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.425149 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.425253 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.424837 4685 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.457574 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.458199 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zbcw2"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.458643 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lmlp9"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.458950 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.459470 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.459681 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.460780 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.461519 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.461873 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.462100 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.462244 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.465880 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-87lv2"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.466384 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.466799 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.467071 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.467983 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.468166 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.469523 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.470056 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.470611 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.470876 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.471063 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.471810 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.471944 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.472404 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.475960 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.477177 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.477904 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rzswn"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.478075 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.478240 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.478325 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.478635 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ctzzw"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.480065 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.480364 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.480700 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.481071 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.481616 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.482736 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.483133 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.485573 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.488559 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-vqfz4"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.489135 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cdm4c"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.489573 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.489723 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.489763 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.489874 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.489998 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.490005 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.490033 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.490657 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.492287 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.492879 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.493277 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.493666 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.494678 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.495526 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.523868 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.527755 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529452 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-encryption-config\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529536 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/260430db-c4ee-45e2-9f66-249c413e7e74-secret-volume\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529587 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529594 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6pfv\" (UniqueName: \"kubernetes.io/projected/f7693be4-39b9-45b7-94bf-d190088e7494-kube-api-access-h6pfv\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529826 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529868 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-audit-policies\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529899 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk9m7\" (UniqueName: \"kubernetes.io/projected/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-kube-api-access-lk9m7\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529924 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529950 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/244a8f0b-1588-493c-bc5e-20eba91a40d5-serving-cert\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.529976 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gll2\" (UniqueName: \"kubernetes.io/projected/2b96dae6-a630-4803-ac16-b0853fa32542-kube-api-access-8gll2\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530016 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp6r5\" (UniqueName: \"kubernetes.io/projected/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-kube-api-access-vp6r5\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530046 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a91bab5c-63b7-41f5-b761-5c50c83da5d0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530063 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r5ws\" (UniqueName: \"kubernetes.io/projected/eefb747e-9461-4086-8f22-03f6730c26ed-kube-api-access-6r5ws\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530092 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6420c1b6-8ea6-43b7-9957-5282c8b73a76-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530120 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-audit-dir\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530149 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6lrn\" (UniqueName: \"kubernetes.io/projected/8bac27b6-1031-4fe6-8af0-0fd0c026940a-kube-api-access-f6lrn\") pod \"dns-operator-744455d44c-5ljxg\" (UID: 
\"8bac27b6-1031-4fe6-8af0-0fd0c026940a\") " pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530174 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-client-ca\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530201 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530223 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2280720-bbcb-438f-b38e-3b24566ce570-serving-cert\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530232 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530247 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-oauth-config\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530272 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqwc6\" (UniqueName: \"kubernetes.io/projected/a91bab5c-63b7-41f5-b761-5c50c83da5d0-kube-api-access-mqwc6\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530298 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc4pv\" (UniqueName: \"kubernetes.io/projected/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-kube-api-access-nc4pv\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.530325 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f7693be4-39b9-45b7-94bf-d190088e7494-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc 
kubenswrapper[4685]: I0129 16:28:32.530354 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-metrics-certs\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.535372 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.538754 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540074 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-serving-cert\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540116 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-serving-cert\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540141 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802aac97-996b-4fbc-8616-0ec29f5462df-serving-cert\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540168 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8e536769-03e0-4bc4-9947-a53d789f5b08-auth-proxy-config\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540187 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z2wk\" (UniqueName: \"kubernetes.io/projected/8e536769-03e0-4bc4-9947-a53d789f5b08-kube-api-access-2z2wk\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540215 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540237 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6716355d-d67e-47ab-97b5-3eea8edf4128-service-ca-bundle\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540257 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c24pz\" (UniqueName: \"kubernetes.io/projected/e2280720-bbcb-438f-b38e-3b24566ce570-kube-api-access-c24pz\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540273 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfxts\" (UniqueName: \"kubernetes.io/projected/6716355d-d67e-47ab-97b5-3eea8edf4128-kube-api-access-lfxts\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540311 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-config\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540330 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-config\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540351 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.540373 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.544673 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-audit-policies\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.549208 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8e536769-03e0-4bc4-9947-a53d789f5b08-auth-proxy-config\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.550742 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.555307 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-config\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.555686 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-audit-dir\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.556004 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-encryption-config\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.557356 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-config\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.557616 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.558706 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/70b3bb34-0cd2-49ff-9eb0-e392c530fab1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9qg72\" (UID: \"70b3bb34-0cd2-49ff-9eb0-e392c530fab1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.558753 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/244a8f0b-1588-493c-bc5e-20eba91a40d5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.558824 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-image-import-ca\") pod 
\"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559074 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23ad2357-54ad-4029-8391-285a005ce5bf-audit-dir\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559126 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559210 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-etcd-serving-ca\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559261 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8vvr\" (UniqueName: \"kubernetes.io/projected/16a9553a-1a4c-4b95-a9c8-561696a06ce6-kube-api-access-m8vvr\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559288 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559318 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v28j\" (UniqueName: \"kubernetes.io/projected/23ad2357-54ad-4029-8391-285a005ce5bf-kube-api-access-4v28j\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559348 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-service-ca-bundle\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559440 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16a9553a-1a4c-4b95-a9c8-561696a06ce6-trusted-ca\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 
29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559465 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559463 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/244a8f0b-1588-493c-bc5e-20eba91a40d5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559486 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f7693be4-39b9-45b7-94bf-d190088e7494-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559513 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8e536769-03e0-4bc4-9947-a53d789f5b08-machine-approver-tls\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559536 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559631 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559933 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-image-import-ca\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.559993 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23ad2357-54ad-4029-8391-285a005ce5bf-audit-dir\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.560135 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-etcd-serving-ca\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " 
pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.560565 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6420c1b6-8ea6-43b7-9957-5282c8b73a76-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.560945 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.561824 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn8ms\" (UniqueName: \"kubernetes.io/projected/6420c1b6-8ea6-43b7-9957-5282c8b73a76-kube-api-access-gn8ms\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.561828 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/244a8f0b-1588-493c-bc5e-20eba91a40d5-serving-cert\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.562075 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkxmt\" (UniqueName: \"kubernetes.io/projected/6dd08be9-187d-457a-ae06-ead4cb0527c6-kube-api-access-zkxmt\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.562219 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-config\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.562633 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6420c1b6-8ea6-43b7-9957-5282c8b73a76-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.563026 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.563106 4685 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-serving-cert\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.563866 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16a9553a-1a4c-4b95-a9c8-561696a06ce6-trusted-ca\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.564836 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.565413 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6420c1b6-8ea6-43b7-9957-5282c8b73a76-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.565893 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.566530 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-audit\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.566559 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.566937 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-serving-cert\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.567866 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.570219 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.571210 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.573183 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.576589 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578069 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.562801 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2b96dae6-a630-4803-ac16-b0853fa32542-audit\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578712 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578761 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578808 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578834 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7n9h\" (UniqueName: \"kubernetes.io/projected/e3f08bb7-dc95-4b73-b849-cee551c94ac7-kube-api-access-l7n9h\") pod \"cluster-samples-operator-665b6dd947-pglkm\" (UID: \"e3f08bb7-dc95-4b73-b849-cee551c94ac7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578870 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e536769-03e0-4bc4-9947-a53d789f5b08-config\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578890 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p9jd\" (UniqueName: \"kubernetes.io/projected/244a8f0b-1588-493c-bc5e-20eba91a40d5-kube-api-access-2p9jd\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578912 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/eefb747e-9461-4086-8f22-03f6730c26ed-images\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578948 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h27nz\" (UniqueName: \"kubernetes.io/projected/260430db-c4ee-45e2-9f66-249c413e7e74-kube-api-access-h27nz\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578966 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-config\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.578990 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579026 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-stats-auth\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579051 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579070 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2vzg\" (UniqueName: \"kubernetes.io/projected/3d7f2af1-96ec-40cd-864e-aa4b4827fdae-kube-api-access-k2vzg\") pod \"downloads-7954f5f757-hndrh\" (UID: \"3d7f2af1-96ec-40cd-864e-aa4b4827fdae\") " pod="openshift-console/downloads-7954f5f757-hndrh" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579103 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-oauth-serving-cert\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579123 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f7693be4-39b9-45b7-94bf-d190088e7494-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579134 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8e536769-03e0-4bc4-9947-a53d789f5b08-machine-approver-tls\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579160 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e3f08bb7-dc95-4b73-b849-cee551c94ac7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-pglkm\" (UID: \"e3f08bb7-dc95-4b73-b849-cee551c94ac7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579198 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b96dae6-a630-4803-ac16-b0853fa32542-audit-dir\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579222 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2g48\" (UniqueName: \"kubernetes.io/projected/70b3bb34-0cd2-49ff-9eb0-e392c530fab1-kube-api-access-c2g48\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-9qg72\" (UID: \"70b3bb34-0cd2-49ff-9eb0-e392c530fab1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579261 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-serving-cert\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579287 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8bac27b6-1031-4fe6-8af0-0fd0c026940a-metrics-tls\") pod \"dns-operator-744455d44c-5ljxg\" (UID: \"8bac27b6-1031-4fe6-8af0-0fd0c026940a\") " pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579305 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/eefb747e-9461-4086-8f22-03f6730c26ed-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579340 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579361 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579385 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-serving-cert\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579426 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-client-ca\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579460 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-audit-policies\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579496 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-etcd-client\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579514 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579533 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-default-certificate\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579571 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-trusted-ca-bundle\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579592 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16a9553a-1a4c-4b95-a9c8-561696a06ce6-serving-cert\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579625 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2b96dae6-a630-4803-ac16-b0853fa32542-node-pullsecrets\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579662 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91bab5c-63b7-41f5-b761-5c50c83da5d0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579685 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-etcd-client\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579701 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-p72j5\" (UniqueName: \"kubernetes.io/projected/802aac97-996b-4fbc-8616-0ec29f5462df-kube-api-access-p72j5\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579735 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eefb747e-9461-4086-8f22-03f6730c26ed-config\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579754 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/260430db-c4ee-45e2-9f66-249c413e7e74-config-volume\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579774 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16a9553a-1a4c-4b95-a9c8-561696a06ce6-config\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.579806 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-encryption-config\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.599949 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.600040 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2b96dae6-a630-4803-ac16-b0853fa32542-node-pullsecrets\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.600068 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-service-ca\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.600637 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-audit-policies\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.601180 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.601345 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e536769-03e0-4bc4-9947-a53d789f5b08-config\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.601765 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.601829 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b96dae6-a630-4803-ac16-b0853fa32542-audit-dir\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.602612 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.603599 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16a9553a-1a4c-4b95-a9c8-561696a06ce6-serving-cert\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.603826 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-etcd-client\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.603959 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16a9553a-1a4c-4b95-a9c8-561696a06ce6-config\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.604455 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.604976 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.604972 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.605359 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e3f08bb7-dc95-4b73-b849-cee551c94ac7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-pglkm\" (UID: \"e3f08bb7-dc95-4b73-b849-cee551c94ac7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.605954 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-encryption-config\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.606235 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-dpcng"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.607055 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.607726 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.608292 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.608338 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.608457 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b96dae6-a630-4803-ac16-b0853fa32542-etcd-client\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.609316 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.610339 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.611847 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:32 crc 
kubenswrapper[4685]: I0129 16:28:32.613238 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.616636 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.620194 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9cgjg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.621615 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zbcw2"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.623153 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.624165 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.625281 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.626375 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbgv8"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.627763 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5ljxg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.628623 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hlrxs"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.629718 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.630797 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gn4d6"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.631825 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.632836 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.633851 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.634959 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.635942 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cdm4c"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.636992 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-hndrh"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.638084 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-rzswn"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.639155 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.640407 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.641572 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xgmc2"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.642613 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jmf78"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.642909 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.643827 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ctzzw"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.644981 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lmlp9"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.646000 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.647217 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.648460 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.650159 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-vqfz4"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.651144 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.652258 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.653218 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7mtnc"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.654899 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-w6clq"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.655190 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.655840 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-w6clq"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.655961 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.656226 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7mtnc"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.657174 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-cgh7q"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.657579 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.658163 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-cgh7q"] Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.663605 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.683206 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.702867 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-service-ca-bundle\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.702957 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f7693be4-39b9-45b7-94bf-d190088e7494-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703057 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkxmt\" (UniqueName: \"kubernetes.io/projected/6dd08be9-187d-457a-ae06-ead4cb0527c6-kube-api-access-zkxmt\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703096 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703131 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-config\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703190 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/eefb747e-9461-4086-8f22-03f6730c26ed-images\") pod 
\"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703224 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h27nz\" (UniqueName: \"kubernetes.io/projected/260430db-c4ee-45e2-9f66-249c413e7e74-kube-api-access-h27nz\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703255 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-config\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703288 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703324 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2vzg\" (UniqueName: \"kubernetes.io/projected/3d7f2af1-96ec-40cd-864e-aa4b4827fdae-kube-api-access-k2vzg\") pod \"downloads-7954f5f757-hndrh\" (UID: \"3d7f2af1-96ec-40cd-864e-aa4b4827fdae\") " pod="openshift-console/downloads-7954f5f757-hndrh" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703356 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-stats-auth\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703388 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f7693be4-39b9-45b7-94bf-d190088e7494-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703474 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-oauth-serving-cert\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703558 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2g48\" (UniqueName: \"kubernetes.io/projected/70b3bb34-0cd2-49ff-9eb0-e392c530fab1-kube-api-access-c2g48\") pod \"control-plane-machine-set-operator-78cbb6b69f-9qg72\" (UID: \"70b3bb34-0cd2-49ff-9eb0-e392c530fab1\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703595 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-serving-cert\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703625 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8bac27b6-1031-4fe6-8af0-0fd0c026940a-metrics-tls\") pod \"dns-operator-744455d44c-5ljxg\" (UID: \"8bac27b6-1031-4fe6-8af0-0fd0c026940a\") " pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703662 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/eefb747e-9461-4086-8f22-03f6730c26ed-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703691 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-serving-cert\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703716 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-client-ca\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703750 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-default-certificate\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703775 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-trusted-ca-bundle\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703812 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p72j5\" (UniqueName: \"kubernetes.io/projected/802aac97-996b-4fbc-8616-0ec29f5462df-kube-api-access-p72j5\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703836 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a91bab5c-63b7-41f5-b761-5c50c83da5d0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703857 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/260430db-c4ee-45e2-9f66-249c413e7e74-config-volume\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703880 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eefb747e-9461-4086-8f22-03f6730c26ed-config\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703902 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-service-ca\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703932 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/260430db-c4ee-45e2-9f66-249c413e7e74-secret-volume\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703947 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.703975 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704004 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6pfv\" (UniqueName: \"kubernetes.io/projected/f7693be4-39b9-45b7-94bf-d190088e7494-kube-api-access-h6pfv\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704046 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp6r5\" (UniqueName: \"kubernetes.io/projected/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-kube-api-access-vp6r5\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc 
kubenswrapper[4685]: I0129 16:28:32.704076 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a91bab5c-63b7-41f5-b761-5c50c83da5d0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704110 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r5ws\" (UniqueName: \"kubernetes.io/projected/eefb747e-9461-4086-8f22-03f6730c26ed-kube-api-access-6r5ws\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704136 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6lrn\" (UniqueName: \"kubernetes.io/projected/8bac27b6-1031-4fe6-8af0-0fd0c026940a-kube-api-access-f6lrn\") pod \"dns-operator-744455d44c-5ljxg\" (UID: \"8bac27b6-1031-4fe6-8af0-0fd0c026940a\") " pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704160 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-client-ca\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704181 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2280720-bbcb-438f-b38e-3b24566ce570-serving-cert\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704211 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-oauth-config\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704246 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqwc6\" (UniqueName: \"kubernetes.io/projected/a91bab5c-63b7-41f5-b761-5c50c83da5d0-kube-api-access-mqwc6\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704275 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc4pv\" (UniqueName: \"kubernetes.io/projected/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-kube-api-access-nc4pv\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704298 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f7693be4-39b9-45b7-94bf-d190088e7494-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704333 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-metrics-certs\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704357 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802aac97-996b-4fbc-8616-0ec29f5462df-serving-cert\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704388 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6716355d-d67e-47ab-97b5-3eea8edf4128-service-ca-bundle\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704422 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-service-ca-bundle\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704449 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c24pz\" (UniqueName: \"kubernetes.io/projected/e2280720-bbcb-438f-b38e-3b24566ce570-kube-api-access-c24pz\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704482 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfxts\" (UniqueName: \"kubernetes.io/projected/6716355d-d67e-47ab-97b5-3eea8edf4128-kube-api-access-lfxts\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704504 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704526 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-config\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704551 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-config\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704574 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/70b3bb34-0cd2-49ff-9eb0-e392c530fab1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9qg72\" (UID: \"70b3bb34-0cd2-49ff-9eb0-e392c530fab1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704687 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-config\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704794 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.704986 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91bab5c-63b7-41f5-b761-5c50c83da5d0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.707533 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-client-ca\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.708043 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-oauth-serving-cert\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.708721 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/260430db-c4ee-45e2-9f66-249c413e7e74-config-volume\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.709055 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/eefb747e-9461-4086-8f22-03f6730c26ed-images\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.709320 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.709804 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f7693be4-39b9-45b7-94bf-d190088e7494-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.710348 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/70b3bb34-0cd2-49ff-9eb0-e392c530fab1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9qg72\" (UID: \"70b3bb34-0cd2-49ff-9eb0-e392c530fab1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.710798 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-oauth-config\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.710889 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-trusted-ca-bundle\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.711372 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dd08be9-187d-457a-ae06-ead4cb0527c6-console-serving-cert\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.711606 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f7693be4-39b9-45b7-94bf-d190088e7494-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.711707 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eefb747e-9461-4086-8f22-03f6730c26ed-config\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.711874 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.712207 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6dd08be9-187d-457a-ae06-ead4cb0527c6-service-ca\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.712303 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-config\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.713388 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-config\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.714643 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8bac27b6-1031-4fe6-8af0-0fd0c026940a-metrics-tls\") pod \"dns-operator-744455d44c-5ljxg\" (UID: \"8bac27b6-1031-4fe6-8af0-0fd0c026940a\") " pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.715628 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-serving-cert\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.715828 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/eefb747e-9461-4086-8f22-03f6730c26ed-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.717236 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802aac97-996b-4fbc-8616-0ec29f5462df-serving-cert\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.718677 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.718835 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/260430db-c4ee-45e2-9f66-249c413e7e74-secret-volume\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.719789 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a91bab5c-63b7-41f5-b761-5c50c83da5d0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.723359 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.743951 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.763798 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.783551 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.803253 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.808183 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2280720-bbcb-438f-b38e-3b24566ce570-serving-cert\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.823484 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.826081 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-config\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.844559 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"client-ca" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.852215 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-client-ca\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.863737 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.884257 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.905439 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.914364 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-default-certificate\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.923292 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.927059 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-stats-auth\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.943953 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.953359 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6716355d-d67e-47ab-97b5-3eea8edf4128-metrics-certs\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.963842 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.971882 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6716355d-d67e-47ab-97b5-3eea8edf4128-service-ca-bundle\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:32 crc kubenswrapper[4685]: I0129 16:28:32.982677 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.003427 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 
16:28:33.023854 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.043139 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.063818 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.083788 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.103328 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.124429 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.144241 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.163029 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.223253 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.243704 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.263337 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.290135 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.303220 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.324272 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.348409 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.363762 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.390292 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.403822 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.423073 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.444100 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.462937 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.481593 4685 request.go:700] Waited for 1.00017729s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/secrets?fieldSelector=metadata.name%3Detcd-client&limit=500&resourceVersion=0 Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.483857 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.503504 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.523498 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.543369 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.563726 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.584302 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.608639 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.623840 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.644149 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.663704 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.685185 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.703090 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.724045 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.744440 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 
16:28:33.763679 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.783983 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.804013 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.824347 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.844112 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.863254 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.884434 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.903191 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.923781 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.943424 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.963775 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 29 16:28:33 crc kubenswrapper[4685]: I0129 16:28:33.985359 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.006597 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.024346 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.044208 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.064683 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.100158 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gll2\" (UniqueName: \"kubernetes.io/projected/2b96dae6-a630-4803-ac16-b0853fa32542-kube-api-access-8gll2\") pod \"apiserver-76f77b778f-hlrxs\" (UID: \"2b96dae6-a630-4803-ac16-b0853fa32542\") " pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.117430 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lk9m7\" (UniqueName: \"kubernetes.io/projected/a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946-kube-api-access-lk9m7\") pod \"apiserver-7bbb656c7d-cngbb\" (UID: \"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.123275 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.144619 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.147923 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.186190 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z2wk\" (UniqueName: \"kubernetes.io/projected/8e536769-03e0-4bc4-9947-a53d789f5b08-kube-api-access-2z2wk\") pod \"machine-approver-56656f9798-ngfpx\" (UID: \"8e536769-03e0-4bc4-9947-a53d789f5b08\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.198214 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8vvr\" (UniqueName: \"kubernetes.io/projected/16a9553a-1a4c-4b95-a9c8-561696a06ce6-kube-api-access-m8vvr\") pod \"console-operator-58897d9998-9cgjg\" (UID: \"16a9553a-1a4c-4b95-a9c8-561696a06ce6\") " pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.204239 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.238480 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn8ms\" (UniqueName: \"kubernetes.io/projected/6420c1b6-8ea6-43b7-9957-5282c8b73a76-kube-api-access-gn8ms\") pod \"openshift-apiserver-operator-796bbdcf4f-k5bzr\" (UID: \"6420c1b6-8ea6-43b7-9957-5282c8b73a76\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.243877 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.286212 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.288124 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p9jd\" (UniqueName: \"kubernetes.io/projected/244a8f0b-1588-493c-bc5e-20eba91a40d5-kube-api-access-2p9jd\") pod \"openshift-config-operator-7777fb866f-hb7f9\" (UID: \"244a8f0b-1588-493c-bc5e-20eba91a40d5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.304772 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.314181 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v28j\" (UniqueName: \"kubernetes.io/projected/23ad2357-54ad-4029-8391-285a005ce5bf-kube-api-access-4v28j\") pod \"oauth-openshift-558db77b4-gn4d6\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.323872 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.324127 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7n9h\" (UniqueName: \"kubernetes.io/projected/e3f08bb7-dc95-4b73-b849-cee551c94ac7-kube-api-access-l7n9h\") pod \"cluster-samples-operator-665b6dd947-pglkm\" (UID: \"e3f08bb7-dc95-4b73-b849-cee551c94ac7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.331130 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.331224 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.337419 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.344463 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.364500 4685 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.385013 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.404569 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.423187 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.445213 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.464570 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.481766 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.484598 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.501870 4685 request.go:700] Waited for 1.843992512s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Ddefault-dockercfg-2llfx&limit=500&resourceVersion=0 Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.504813 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.516785 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.523895 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.545321 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.562467 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.580207 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f7693be4-39b9-45b7-94bf-d190088e7494-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.593794 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.597546 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkxmt\" (UniqueName: \"kubernetes.io/projected/6dd08be9-187d-457a-ae06-ead4cb0527c6-kube-api-access-zkxmt\") pod \"console-f9d7485db-jmf78\" (UID: \"6dd08be9-187d-457a-ae06-ead4cb0527c6\") " pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.624290 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h27nz\" (UniqueName: \"kubernetes.io/projected/260430db-c4ee-45e2-9f66-249c413e7e74-kube-api-access-h27nz\") pod \"collect-profiles-29495055-gwglr\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.636119 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb"] Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.643513 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2g48\" (UniqueName: \"kubernetes.io/projected/70b3bb34-0cd2-49ff-9eb0-e392c530fab1-kube-api-access-c2g48\") pod \"control-plane-machine-set-operator-78cbb6b69f-9qg72\" (UID: \"70b3bb34-0cd2-49ff-9eb0-e392c530fab1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:34 crc kubenswrapper[4685]: W0129 16:28:34.648844 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4c55a83_d9fb_4ae1_8d1a_d18b07c2f946.slice/crio-61ef46e2fc79741b3db136f05785e48dddcf85b6910e8293efd4f1f3fd9349f9 WatchSource:0}: Error finding container 61ef46e2fc79741b3db136f05785e48dddcf85b6910e8293efd4f1f3fd9349f9: Status 404 returned error can't find the container with id 61ef46e2fc79741b3db136f05785e48dddcf85b6910e8293efd4f1f3fd9349f9 Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.658653 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2vzg\" (UniqueName: \"kubernetes.io/projected/3d7f2af1-96ec-40cd-864e-aa4b4827fdae-kube-api-access-k2vzg\") pod \"downloads-7954f5f757-hndrh\" (UID: \"3d7f2af1-96ec-40cd-864e-aa4b4827fdae\") " pod="openshift-console/downloads-7954f5f757-hndrh" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.665895 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.687855 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqwc6\" (UniqueName: \"kubernetes.io/projected/a91bab5c-63b7-41f5-b761-5c50c83da5d0-kube-api-access-mqwc6\") pod \"openshift-controller-manager-operator-756b6f6bc6-5w9zx\" (UID: \"a91bab5c-63b7-41f5-b761-5c50c83da5d0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.705083 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc4pv\" (UniqueName: \"kubernetes.io/projected/4d0d94eb-79a0-4144-a9fc-35d3d97d91c0-kube-api-access-nc4pv\") pod \"authentication-operator-69f744f599-lmlp9\" (UID: \"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.723955 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p72j5\" (UniqueName: \"kubernetes.io/projected/802aac97-996b-4fbc-8616-0ec29f5462df-kube-api-access-p72j5\") pod \"controller-manager-879f6c89f-xgmc2\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.731802 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.740751 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6pfv\" (UniqueName: \"kubernetes.io/projected/f7693be4-39b9-45b7-94bf-d190088e7494-kube-api-access-h6pfv\") pod \"cluster-image-registry-operator-dc59b4c8b-rhms4\" (UID: \"f7693be4-39b9-45b7-94bf-d190088e7494\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.746595 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.759727 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-hndrh" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.764761 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp6r5\" (UniqueName: \"kubernetes.io/projected/df04c9b0-b6ea-443f-8c92-9b9bee0796ed-kube-api-access-vp6r5\") pod \"kube-storage-version-migrator-operator-b67b599dd-q9qgx\" (UID: \"df04c9b0-b6ea-443f-8c92-9b9bee0796ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.777181 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.779001 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.779595 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9cgjg"] Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.783905 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr"] Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.786913 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hlrxs"] Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.793352 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.797195 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c24pz\" (UniqueName: \"kubernetes.io/projected/e2280720-bbcb-438f-b38e-3b24566ce570-kube-api-access-c24pz\") pod \"route-controller-manager-6576b87f9c-94l2w\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.813893 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.826457 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6lrn\" (UniqueName: \"kubernetes.io/projected/8bac27b6-1031-4fe6-8af0-0fd0c026940a-kube-api-access-f6lrn\") pod \"dns-operator-744455d44c-5ljxg\" (UID: \"8bac27b6-1031-4fe6-8af0-0fd0c026940a\") " pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.829540 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfxts\" (UniqueName: \"kubernetes.io/projected/6716355d-d67e-47ab-97b5-3eea8edf4128-kube-api-access-lfxts\") pod \"router-default-5444994796-87lv2\" (UID: \"6716355d-d67e-47ab-97b5-3eea8edf4128\") " pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.841687 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r5ws\" (UniqueName: \"kubernetes.io/projected/eefb747e-9461-4086-8f22-03f6730c26ed-kube-api-access-6r5ws\") pod \"machine-api-operator-5694c8668f-zbcw2\" (UID: \"eefb747e-9461-4086-8f22-03f6730c26ed\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.911714 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gn4d6"] Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.936927 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-tls\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.937711 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-d9mq5\" (UniqueName: \"kubernetes.io/projected/3e58813b-dc92-436d-b823-69e4988b407e-kube-api-access-d9mq5\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.937756 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-trusted-ca\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.937780 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3e58813b-dc92-436d-b823-69e4988b407e-images\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.937850 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg4t7\" (UniqueName: \"kubernetes.io/projected/557228b3-f1b9-419d-94c8-e67cf23d6d7e-kube-api-access-tg4t7\") pod \"package-server-manager-789f6589d5-9tdtm\" (UID: \"557228b3-f1b9-419d-94c8-e67cf23d6d7e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.937914 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/14cfc19d-44fb-45de-9abc-df6d268d7cb2-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938478 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-certificates\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938609 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-bound-sa-token\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938690 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e58813b-dc92-436d-b823-69e4988b407e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938724 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wdmw\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-kube-api-access-2wdmw\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938802 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938829 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/557228b3-f1b9-419d-94c8-e67cf23d6d7e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-9tdtm\" (UID: \"557228b3-f1b9-419d-94c8-e67cf23d6d7e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938848 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e58813b-dc92-436d-b823-69e4988b407e-proxy-tls\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938876 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/14cfc19d-44fb-45de-9abc-df6d268d7cb2-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.938890 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-config\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:34 crc kubenswrapper[4685]: E0129 16:28:34.939636 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.439624021 +0000 UTC m=+142.636980273 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.939764 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.939890 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.948242 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9"] Jan 29 16:28:34 crc kubenswrapper[4685]: W0129 16:28:34.962724 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16a9553a_1a4c_4b95_a9c8_561696a06ce6.slice/crio-8b34ea3ecb258905fa55fe0f08d149cec823514b3bd0b71d35cdf0a81ce2e969 WatchSource:0}: Error finding container 8b34ea3ecb258905fa55fe0f08d149cec823514b3bd0b71d35cdf0a81ce2e969: Status 404 returned error can't find the container with id 8b34ea3ecb258905fa55fe0f08d149cec823514b3bd0b71d35cdf0a81ce2e969 Jan 29 16:28:34 crc kubenswrapper[4685]: W0129 16:28:34.974330 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23ad2357_54ad_4029_8391_285a005ce5bf.slice/crio-94a042e283457aeeaaa0a015700f8bfd9cbbf4e2878a4ee7517360a559e367f6 WatchSource:0}: Error finding container 94a042e283457aeeaaa0a015700f8bfd9cbbf4e2878a4ee7517360a559e367f6: Status 404 returned error can't find the container with id 94a042e283457aeeaaa0a015700f8bfd9cbbf4e2878a4ee7517360a559e367f6 Jan 29 16:28:34 crc kubenswrapper[4685]: I0129 16:28:34.981945 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jmf78"] Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.011809 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.021966 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.038666 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.040970 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041138 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-tls\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041170 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3e58813b-dc92-436d-b823-69e4988b407e-images\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041195 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8jcw\" (UniqueName: \"kubernetes.io/projected/11fd8260-e90d-4e8c-8239-63ad9e0fc627-kube-api-access-m8jcw\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.041250 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.541217224 +0000 UTC m=+142.738573516 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041300 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-ca\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041541 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg4t7\" (UniqueName: \"kubernetes.io/projected/557228b3-f1b9-419d-94c8-e67cf23d6d7e-kube-api-access-tg4t7\") pod \"package-server-manager-789f6589d5-9tdtm\" (UID: \"557228b3-f1b9-419d-94c8-e67cf23d6d7e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041580 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041603 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssxzj\" (UniqueName: \"kubernetes.io/projected/9772712a-fbc8-400e-950a-049a420c3b6e-kube-api-access-ssxzj\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041634 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-registration-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041664 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/829122ce-0117-4939-b96b-39bf16965baa-apiservice-cert\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041682 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bjfh\" (UniqueName: \"kubernetes.io/projected/18a03f5f-03c2-43a4-9b6c-632fc2237398-kube-api-access-5bjfh\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041701 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/11fd8260-e90d-4e8c-8239-63ad9e0fc627-proxy-tls\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041725 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/14cfc19d-44fb-45de-9abc-df6d268d7cb2-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041773 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-config\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041790 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/203a75e9-22e5-4e38-ad0f-7689e0d9805a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041839 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-config\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041879 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/365fe3bd-d4b5-481b-a157-45a683747d4c-profile-collector-cert\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041908 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-socket-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041958 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041974 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/365fe3bd-d4b5-481b-a157-45a683747d4c-srv-cert\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.041994 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-bound-sa-token\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042015 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhfjb\" (UniqueName: \"kubernetes.io/projected/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-kube-api-access-dhfjb\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042033 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/829122ce-0117-4939-b96b-39bf16965baa-webhook-cert\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042054 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e58813b-dc92-436d-b823-69e4988b407e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042081 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wdmw\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-kube-api-access-2wdmw\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042118 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/557228b3-f1b9-419d-94c8-e67cf23d6d7e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-9tdtm\" (UID: \"557228b3-f1b9-419d-94c8-e67cf23d6d7e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042133 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e58813b-dc92-436d-b823-69e4988b407e-proxy-tls\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042152 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/09081f63-559c-46a8-8118-3ba2b9b9754a-metrics-tls\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042172 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d1b78bd-30ac-4538-8d20-68dbea8999b1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042216 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-config\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042234 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsqlh\" (UniqueName: \"kubernetes.io/projected/829122ce-0117-4939-b96b-39bf16965baa-kube-api-access-vsqlh\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042270 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mgm5\" (UniqueName: \"kubernetes.io/projected/edd98a69-ec85-4975-8e33-8ab13fb284d9-kube-api-access-6mgm5\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042290 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-service-ca\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042335 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042371 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-plugins-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042404 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g2km\" (UniqueName: 
\"kubernetes.io/projected/7f1a9054-f580-42de-a694-42d90bc23064-kube-api-access-2g2km\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042425 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9mq5\" (UniqueName: \"kubernetes.io/projected/3e58813b-dc92-436d-b823-69e4988b407e-kube-api-access-d9mq5\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042452 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbsmh\" (UniqueName: \"kubernetes.io/projected/c8c6290e-9fff-4732-a6f0-07c463ac2f00-kube-api-access-tbsmh\") pod \"multus-admission-controller-857f4d67dd-cdm4c\" (UID: \"c8c6290e-9fff-4732-a6f0-07c463ac2f00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042488 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-trusted-ca\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042510 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/18a03f5f-03c2-43a4-9b6c-632fc2237398-metrics-tls\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042534 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d1b78bd-30ac-4538-8d20-68dbea8999b1-config\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042595 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-mountpoint-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042629 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09081f63-559c-46a8-8118-3ba2b9b9754a-trusted-ca\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042646 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/11fd8260-e90d-4e8c-8239-63ad9e0fc627-mcc-auth-proxy-config\") pod 
\"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042675 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7f1a9054-f580-42de-a694-42d90bc23064-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042719 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3-cert\") pod \"ingress-canary-cgh7q\" (UID: \"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3\") " pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042743 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/09081f63-559c-46a8-8118-3ba2b9b9754a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042773 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/203a75e9-22e5-4e38-ad0f-7689e0d9805a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042842 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mnqp\" (UniqueName: \"kubernetes.io/projected/092013a3-0a24-4a94-ba85-8fe88c4c50d4-kube-api-access-2mnqp\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042891 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c8c6290e-9fff-4732-a6f0-07c463ac2f00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cdm4c\" (UID: \"c8c6290e-9fff-4732-a6f0-07c463ac2f00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042896 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3e58813b-dc92-436d-b823-69e4988b407e-images\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042911 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb225\" (UniqueName: \"kubernetes.io/projected/e8aadbc9-3235-4121-b2db-19b256575d9a-kube-api-access-tb225\") pod \"csi-hostpathplugin-7mtnc\" 
(UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.042972 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d1b78bd-30ac-4538-8d20-68dbea8999b1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.043038 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-certificates\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.043069 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/203a75e9-22e5-4e38-ad0f-7689e0d9805a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.043093 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7f1a9054-f580-42de-a694-42d90bc23064-srv-cert\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.043141 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-csi-data-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.043424 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnn87\" (UniqueName: \"kubernetes.io/projected/365fe3bd-d4b5-481b-a157-45a683747d4c-kube-api-access-lnn87\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.043454 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6524\" (UniqueName: \"kubernetes.io/projected/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-kube-api-access-k6524\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.043485 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-node-bootstrap-token\") pod \"machine-config-server-dpcng\" (UID: 
\"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.044752 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7l67\" (UniqueName: \"kubernetes.io/projected/ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3-kube-api-access-l7l67\") pod \"ingress-canary-cgh7q\" (UID: \"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3\") " pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.044782 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/edd98a69-ec85-4975-8e33-8ab13fb284d9-signing-cabundle\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.044826 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-certificates\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.044975 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-certs\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.045051 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/edd98a69-ec85-4975-8e33-8ab13fb284d9-signing-key\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.045076 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a03f5f-03c2-43a4-9b6c-632fc2237398-config-volume\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.045156 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.045199 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9772712a-fbc8-400e-950a-049a420c3b6e-serving-cert\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.045221 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-serving-cert\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.045374 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-tls\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.047752 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e58813b-dc92-436d-b823-69e4988b407e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.048373 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-trusted-ca\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.048811 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.54879258 +0000 UTC m=+142.746148842 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.048940 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/14cfc19d-44fb-45de-9abc-df6d268d7cb2-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.049006 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.049034 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55gtt\" (UniqueName: \"kubernetes.io/projected/09081f63-559c-46a8-8118-3ba2b9b9754a-kube-api-access-55gtt\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.049065 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr8ks\" (UniqueName: \"kubernetes.io/projected/de69b975-f766-4c92-bb26-53fd18f4886c-kube-api-access-cr8ks\") pod \"migrator-59844c95c7-t8jrg\" (UID: \"de69b975-f766-4c92-bb26-53fd18f4886c\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.049441 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/14cfc19d-44fb-45de-9abc-df6d268d7cb2-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.049590 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-client\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.049845 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/829122ce-0117-4939-b96b-39bf16965baa-tmpfs\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.053946 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/14cfc19d-44fb-45de-9abc-df6d268d7cb2-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.056437 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/557228b3-f1b9-419d-94c8-e67cf23d6d7e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-9tdtm\" (UID: \"557228b3-f1b9-419d-94c8-e67cf23d6d7e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.056602 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.064670 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e58813b-dc92-436d-b823-69e4988b407e-proxy-tls\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.065911 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-config\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.083126 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg4t7\" (UniqueName: \"kubernetes.io/projected/557228b3-f1b9-419d-94c8-e67cf23d6d7e-kube-api-access-tg4t7\") pod \"package-server-manager-789f6589d5-9tdtm\" (UID: \"557228b3-f1b9-419d-94c8-e67cf23d6d7e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.087924 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.098343 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-bound-sa-token\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.115535 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.118770 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wdmw\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-kube-api-access-2wdmw\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.153995 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.154173 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.654151885 +0000 UTC m=+142.851508147 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154200 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-config\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154225 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/365fe3bd-d4b5-481b-a157-45a683747d4c-profile-collector-cert\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154247 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-socket-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154268 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/365fe3bd-d4b5-481b-a157-45a683747d4c-srv-cert\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154285 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154301 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhfjb\" (UniqueName: \"kubernetes.io/projected/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-kube-api-access-dhfjb\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154317 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/829122ce-0117-4939-b96b-39bf16965baa-webhook-cert\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154346 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09081f63-559c-46a8-8118-3ba2b9b9754a-metrics-tls\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154363 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d1b78bd-30ac-4538-8d20-68dbea8999b1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154380 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsqlh\" (UniqueName: \"kubernetes.io/projected/829122ce-0117-4939-b96b-39bf16965baa-kube-api-access-vsqlh\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154412 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mgm5\" (UniqueName: \"kubernetes.io/projected/edd98a69-ec85-4975-8e33-8ab13fb284d9-kube-api-access-6mgm5\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154438 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-service-ca\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154456 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g2km\" (UniqueName: \"kubernetes.io/projected/7f1a9054-f580-42de-a694-42d90bc23064-kube-api-access-2g2km\") pod 
\"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154471 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-plugins-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154495 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbsmh\" (UniqueName: \"kubernetes.io/projected/c8c6290e-9fff-4732-a6f0-07c463ac2f00-kube-api-access-tbsmh\") pod \"multus-admission-controller-857f4d67dd-cdm4c\" (UID: \"c8c6290e-9fff-4732-a6f0-07c463ac2f00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154514 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/18a03f5f-03c2-43a4-9b6c-632fc2237398-metrics-tls\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154530 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d1b78bd-30ac-4538-8d20-68dbea8999b1-config\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154547 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-mountpoint-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154567 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7f1a9054-f580-42de-a694-42d90bc23064-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154583 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09081f63-559c-46a8-8118-3ba2b9b9754a-trusted-ca\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154599 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/11fd8260-e90d-4e8c-8239-63ad9e0fc627-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154614 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3-cert\") pod \"ingress-canary-cgh7q\" (UID: \"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3\") " pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154630 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/09081f63-559c-46a8-8118-3ba2b9b9754a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154650 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/203a75e9-22e5-4e38-ad0f-7689e0d9805a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154666 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mnqp\" (UniqueName: \"kubernetes.io/projected/092013a3-0a24-4a94-ba85-8fe88c4c50d4-kube-api-access-2mnqp\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154684 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c8c6290e-9fff-4732-a6f0-07c463ac2f00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cdm4c\" (UID: \"c8c6290e-9fff-4732-a6f0-07c463ac2f00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154708 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb225\" (UniqueName: \"kubernetes.io/projected/e8aadbc9-3235-4121-b2db-19b256575d9a-kube-api-access-tb225\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154731 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d1b78bd-30ac-4538-8d20-68dbea8999b1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154751 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/203a75e9-22e5-4e38-ad0f-7689e0d9805a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154771 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/7f1a9054-f580-42de-a694-42d90bc23064-srv-cert\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154788 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-csi-data-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154806 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6524\" (UniqueName: \"kubernetes.io/projected/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-kube-api-access-k6524\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154822 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnn87\" (UniqueName: \"kubernetes.io/projected/365fe3bd-d4b5-481b-a157-45a683747d4c-kube-api-access-lnn87\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154836 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-node-bootstrap-token\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154851 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7l67\" (UniqueName: \"kubernetes.io/projected/ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3-kube-api-access-l7l67\") pod \"ingress-canary-cgh7q\" (UID: \"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3\") " pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154866 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/edd98a69-ec85-4975-8e33-8ab13fb284d9-signing-cabundle\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154881 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-certs\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154898 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/edd98a69-ec85-4975-8e33-8ab13fb284d9-signing-key\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154914 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a03f5f-03c2-43a4-9b6c-632fc2237398-config-volume\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154932 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154948 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9772712a-fbc8-400e-950a-049a420c3b6e-serving-cert\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154963 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-serving-cert\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154972 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-config\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.154985 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55gtt\" (UniqueName: \"kubernetes.io/projected/09081f63-559c-46a8-8118-3ba2b9b9754a-kube-api-access-55gtt\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155001 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr8ks\" (UniqueName: \"kubernetes.io/projected/de69b975-f766-4c92-bb26-53fd18f4886c-kube-api-access-cr8ks\") pod \"migrator-59844c95c7-t8jrg\" (UID: \"de69b975-f766-4c92-bb26-53fd18f4886c\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155018 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-client\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155034 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/829122ce-0117-4939-b96b-39bf16965baa-tmpfs\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155061 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8jcw\" (UniqueName: \"kubernetes.io/projected/11fd8260-e90d-4e8c-8239-63ad9e0fc627-kube-api-access-m8jcw\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155081 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-ca\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155105 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155122 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssxzj\" (UniqueName: \"kubernetes.io/projected/9772712a-fbc8-400e-950a-049a420c3b6e-kube-api-access-ssxzj\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155140 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-registration-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155156 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/829122ce-0117-4939-b96b-39bf16965baa-apiservice-cert\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155172 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bjfh\" (UniqueName: \"kubernetes.io/projected/18a03f5f-03c2-43a4-9b6c-632fc2237398-kube-api-access-5bjfh\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155188 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/11fd8260-e90d-4e8c-8239-63ad9e0fc627-proxy-tls\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155203 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/203a75e9-22e5-4e38-ad0f-7689e0d9805a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155222 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-config\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155366 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-socket-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.155921 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-config\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.157624 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/365fe3bd-d4b5-481b-a157-45a683747d4c-profile-collector-cert\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.158091 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-ca\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.159306 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9772712a-fbc8-400e-950a-049a420c3b6e-serving-cert\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.159559 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/365fe3bd-d4b5-481b-a157-45a683747d4c-srv-cert\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.159592 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-plugins-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.160166 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.161139 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/203a75e9-22e5-4e38-ad0f-7689e0d9805a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.161425 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9mq5\" (UniqueName: \"kubernetes.io/projected/3e58813b-dc92-436d-b823-69e4988b407e-kube-api-access-d9mq5\") pod \"machine-config-operator-74547568cd-v2t2v\" (UID: \"3e58813b-dc92-436d-b823-69e4988b407e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.162190 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/edd98a69-ec85-4975-8e33-8ab13fb284d9-signing-cabundle\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.162891 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.161439 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-csi-data-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.164217 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-serving-cert\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.165256 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-registration-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.165735 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-service-ca\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.165753 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/18a03f5f-03c2-43a4-9b6c-632fc2237398-metrics-tls\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.166092 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ba8cfc8-5a1b-49c0-9ea8-038965d33587-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xnmb9\" (UID: \"6ba8cfc8-5a1b-49c0-9ea8-038965d33587\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.166416 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.666385247 +0000 UTC m=+142.863741509 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.166445 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d1b78bd-30ac-4538-8d20-68dbea8999b1-config\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.166498 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e8aadbc9-3235-4121-b2db-19b256575d9a-mountpoint-dir\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.166728 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.168238 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/edd98a69-ec85-4975-8e33-8ab13fb284d9-signing-key\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.168536 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/829122ce-0117-4939-b96b-39bf16965baa-apiservice-cert\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.168544 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09081f63-559c-46a8-8118-3ba2b9b9754a-metrics-tls\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.169813 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-certs\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.170374 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c8c6290e-9fff-4732-a6f0-07c463ac2f00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cdm4c\" (UID: \"c8c6290e-9fff-4732-a6f0-07c463ac2f00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.170951 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9772712a-fbc8-400e-950a-049a420c3b6e-etcd-client\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.171902 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/11fd8260-e90d-4e8c-8239-63ad9e0fc627-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.172014 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm"] Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.173177 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09081f63-559c-46a8-8118-3ba2b9b9754a-trusted-ca\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc 
kubenswrapper[4685]: I0129 16:28:35.173816 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/829122ce-0117-4939-b96b-39bf16965baa-webhook-cert\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.175873 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/203a75e9-22e5-4e38-ad0f-7689e0d9805a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.177926 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/11fd8260-e90d-4e8c-8239-63ad9e0fc627-proxy-tls\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.180467 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3-cert\") pod \"ingress-canary-cgh7q\" (UID: \"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3\") " pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.181000 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-node-bootstrap-token\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.181846 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d1b78bd-30ac-4538-8d20-68dbea8999b1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.192004 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7f1a9054-f580-42de-a694-42d90bc23064-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.192677 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx"] Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.194708 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7f1a9054-f580-42de-a694-42d90bc23064-srv-cert\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 
16:28:35.209847 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/09081f63-559c-46a8-8118-3ba2b9b9754a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.211121 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lmlp9"] Jan 29 16:28:35 crc kubenswrapper[4685]: W0129 16:28:35.215896 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda91bab5c_63b7_41f5_b761_5c50c83da5d0.slice/crio-6077d9401b023ee187c90708076515d3217a565aa09aa68f0277c07c2bb4d86d WatchSource:0}: Error finding container 6077d9401b023ee187c90708076515d3217a565aa09aa68f0277c07c2bb4d86d: Status 404 returned error can't find the container with id 6077d9401b023ee187c90708076515d3217a565aa09aa68f0277c07c2bb4d86d Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.229668 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g2km\" (UniqueName: \"kubernetes.io/projected/7f1a9054-f580-42de-a694-42d90bc23064-kube-api-access-2g2km\") pod \"olm-operator-6b444d44fb-7lbmb\" (UID: \"7f1a9054-f580-42de-a694-42d90bc23064\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.237279 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.249660 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/203a75e9-22e5-4e38-ad0f-7689e0d9805a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-g2nd4\" (UID: \"203a75e9-22e5-4e38-ad0f-7689e0d9805a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.255904 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.256981 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.756958882 +0000 UTC m=+142.954315144 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.261779 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.262047 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72"] Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.267025 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-hndrh"] Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.268150 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xgmc2"] Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.278169 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbsmh\" (UniqueName: \"kubernetes.io/projected/c8c6290e-9fff-4732-a6f0-07c463ac2f00-kube-api-access-tbsmh\") pod \"multus-admission-controller-857f4d67dd-cdm4c\" (UID: \"c8c6290e-9fff-4732-a6f0-07c463ac2f00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.287576 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/829122ce-0117-4939-b96b-39bf16965baa-tmpfs\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.289188 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr"] Jan 29 16:28:35 crc kubenswrapper[4685]: W0129 16:28:35.294502 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod260430db_c4ee_45e2_9f66_249c413e7e74.slice/crio-4c246fac55fd74a0b4cc47daa1b4e9f534ac34ee3c865f2ab6c2c742cf461773 WatchSource:0}: Error finding container 4c246fac55fd74a0b4cc47daa1b4e9f534ac34ee3c865f2ab6c2c742cf461773: Status 404 returned error can't find the container with id 4c246fac55fd74a0b4cc47daa1b4e9f534ac34ee3c865f2ab6c2c742cf461773 Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.295877 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mnqp\" (UniqueName: \"kubernetes.io/projected/092013a3-0a24-4a94-ba85-8fe88c4c50d4-kube-api-access-2mnqp\") pod \"marketplace-operator-79b997595-rzswn\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.303023 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhfjb\" (UniqueName: \"kubernetes.io/projected/e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa-kube-api-access-dhfjb\") pod \"service-ca-operator-777779d784-cjfmq\" (UID: \"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.337505 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d1b78bd-30ac-4538-8d20-68dbea8999b1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8t9k2\" (UID: \"6d1b78bd-30ac-4538-8d20-68dbea8999b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 
16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.347199 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18a03f5f-03c2-43a4-9b6c-632fc2237398-config-volume\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.347569 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb225\" (UniqueName: \"kubernetes.io/projected/e8aadbc9-3235-4121-b2db-19b256575d9a-kube-api-access-tb225\") pod \"csi-hostpathplugin-7mtnc\" (UID: \"e8aadbc9-3235-4121-b2db-19b256575d9a\") " pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.357460 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.357970 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.85795476 +0000 UTC m=+143.055311032 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.360104 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7l67\" (UniqueName: \"kubernetes.io/projected/ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3-kube-api-access-l7l67\") pod \"ingress-canary-cgh7q\" (UID: \"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3\") " pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.379722 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6524\" (UniqueName: \"kubernetes.io/projected/a6d5603e-1d93-4305-9a3e-28ee5a346a3b-kube-api-access-k6524\") pod \"machine-config-server-dpcng\" (UID: \"a6d5603e-1d93-4305-9a3e-28ee5a346a3b\") " pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.405073 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnn87\" (UniqueName: \"kubernetes.io/projected/365fe3bd-d4b5-481b-a157-45a683747d4c-kube-api-access-lnn87\") pod \"catalog-operator-68c6474976-tr6jn\" (UID: \"365fe3bd-d4b5-481b-a157-45a683747d4c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.432901 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.440216 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8jcw\" (UniqueName: \"kubernetes.io/projected/11fd8260-e90d-4e8c-8239-63ad9e0fc627-kube-api-access-m8jcw\") pod \"machine-config-controller-84d6567774-b9dbg\" (UID: \"11fd8260-e90d-4e8c-8239-63ad9e0fc627\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.448536 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.451217 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w"] Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.458146 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.458321 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.958293109 +0000 UTC m=+143.155649381 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.458684 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.459142 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:35.959122472 +0000 UTC m=+143.156478744 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.462475 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssxzj\" (UniqueName: \"kubernetes.io/projected/9772712a-fbc8-400e-950a-049a420c3b6e-kube-api-access-ssxzj\") pod \"etcd-operator-b45778765-ctzzw\" (UID: \"9772712a-fbc8-400e-950a-049a420c3b6e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.469070 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" event={"ID":"16a9553a-1a4c-4b95-a9c8-561696a06ce6","Type":"ContainerStarted","Data":"8b34ea3ecb258905fa55fe0f08d149cec823514b3bd0b71d35cdf0a81ce2e969"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.469786 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" event={"ID":"2b96dae6-a630-4803-ac16-b0853fa32542","Type":"ContainerStarted","Data":"fbd4bb8fd939385136ebc90b1a55639c4af54d4977e9ff0efc5b8fd43a8c1929"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.470356 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" event={"ID":"244a8f0b-1588-493c-bc5e-20eba91a40d5","Type":"ContainerStarted","Data":"626841921f51af050d78ff2585c9e64fc7ed4091014d1ceb79e09a3c54d04982"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.470967 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" event={"ID":"23ad2357-54ad-4029-8391-285a005ce5bf","Type":"ContainerStarted","Data":"94a042e283457aeeaaa0a015700f8bfd9cbbf4e2878a4ee7517360a559e367f6"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.471615 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" event={"ID":"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946","Type":"ContainerStarted","Data":"61ef46e2fc79741b3db136f05785e48dddcf85b6910e8293efd4f1f3fd9349f9"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.472177 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" event={"ID":"260430db-c4ee-45e2-9f66-249c413e7e74","Type":"ContainerStarted","Data":"4c246fac55fd74a0b4cc47daa1b4e9f534ac34ee3c865f2ab6c2c742cf461773"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.472784 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jmf78" event={"ID":"6dd08be9-187d-457a-ae06-ead4cb0527c6","Type":"ContainerStarted","Data":"fabbcdd4d4e9d36df82f0181a0a6f306d8e892ea4e885cefd070d1df6f584d83"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.473353 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" 
event={"ID":"a91bab5c-63b7-41f5-b761-5c50c83da5d0","Type":"ContainerStarted","Data":"6077d9401b023ee187c90708076515d3217a565aa09aa68f0277c07c2bb4d86d"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.473924 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" event={"ID":"6420c1b6-8ea6-43b7-9957-5282c8b73a76","Type":"ContainerStarted","Data":"6127545f7bf88954e1ea40970ea35c15b6e4a9edb70351dfcb1034df3ded42d7"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.474922 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" event={"ID":"8e536769-03e0-4bc4-9947-a53d789f5b08","Type":"ContainerStarted","Data":"6a48578e48c30590ebe94f6651deb78eea5f49959bd197836399c212caafb415"} Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.478270 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr8ks\" (UniqueName: \"kubernetes.io/projected/de69b975-f766-4c92-bb26-53fd18f4886c-kube-api-access-cr8ks\") pod \"migrator-59844c95c7-t8jrg\" (UID: \"de69b975-f766-4c92-bb26-53fd18f4886c\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.483039 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.492975 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.497338 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsqlh\" (UniqueName: \"kubernetes.io/projected/829122ce-0117-4939-b96b-39bf16965baa-kube-api-access-vsqlh\") pod \"packageserver-d55dfcdfc-jtc28\" (UID: \"829122ce-0117-4939-b96b-39bf16965baa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.504770 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.515835 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.525658 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.542940 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bjfh\" (UniqueName: \"kubernetes.io/projected/18a03f5f-03c2-43a4-9b6c-632fc2237398-kube-api-access-5bjfh\") pod \"dns-default-w6clq\" (UID: \"18a03f5f-03c2-43a4-9b6c-632fc2237398\") " pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.543777 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.552160 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.559737 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mgm5\" (UniqueName: \"kubernetes.io/projected/edd98a69-ec85-4975-8e33-8ab13fb284d9-kube-api-access-6mgm5\") pod \"service-ca-9c57cc56f-vqfz4\" (UID: \"edd98a69-ec85-4975-8e33-8ab13fb284d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.560543 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.561177 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.061118657 +0000 UTC m=+143.258474929 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.561452 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.561979 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.06196702 +0000 UTC m=+143.259323302 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.562334 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55gtt\" (UniqueName: \"kubernetes.io/projected/09081f63-559c-46a8-8118-3ba2b9b9754a-kube-api-access-55gtt\") pod \"ingress-operator-5b745b69d9-gf6h6\" (UID: \"09081f63-559c-46a8-8118-3ba2b9b9754a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.568877 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.575577 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.584966 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-dpcng" Jan 29 16:28:35 crc kubenswrapper[4685]: W0129 16:28:35.594246 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2280720_bbcb_438f_b38e_3b24566ce570.slice/crio-1c5c0ef4cd80fa76be12a16cc039a3e720d37f2d1f7587b92b5ad98180b299bc WatchSource:0}: Error finding container 1c5c0ef4cd80fa76be12a16cc039a3e720d37f2d1f7587b92b5ad98180b299bc: Status 404 returned error can't find the container with id 1c5c0ef4cd80fa76be12a16cc039a3e720d37f2d1f7587b92b5ad98180b299bc Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.615667 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.622510 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.630496 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-cgh7q" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.662562 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.663014 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.162994718 +0000 UTC m=+143.360350980 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: W0129 16:28:35.751891 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6716355d_d67e_47ab_97b5_3eea8edf4128.slice/crio-7cfbc946480cdd086e39e44ac6ef80f1b293dad533db693545935cde376cb1e6 WatchSource:0}: Error finding container 7cfbc946480cdd086e39e44ac6ef80f1b293dad533db693545935cde376cb1e6: Status 404 returned error can't find the container with id 7cfbc946480cdd086e39e44ac6ef80f1b293dad533db693545935cde376cb1e6 Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.764543 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.764943 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.26493026 +0000 UTC m=+143.462286522 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.772966 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.831032 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.873689 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.874681 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.374622644 +0000 UTC m=+143.571978906 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.902306 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:28:35 crc kubenswrapper[4685]: I0129 16:28:35.988186 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:35 crc kubenswrapper[4685]: E0129 16:28:35.989060 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.489035255 +0000 UTC m=+143.686391517 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.022311 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm"] Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.076580 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx"] Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.094083 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.094500 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.594476553 +0000 UTC m=+143.791832815 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.168708 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-cgh7q"] Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.197084 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.697060704 +0000 UTC m=+143.894416996 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.196564 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.301461 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.301737 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.80169772 +0000 UTC m=+143.999053982 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.307439 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.308046 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.808016681 +0000 UTC m=+144.005373133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.410062 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.410662 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:36.910630073 +0000 UTC m=+144.107986345 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.487978 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" event={"ID":"802aac97-996b-4fbc-8616-0ec29f5462df","Type":"ContainerStarted","Data":"0a739e49fca629511080e4c4d3efc18b612c1cc1f18f2c7355d708c8d834c161"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.490419 4685 generic.go:334] "Generic (PLEG): container finished" podID="2b96dae6-a630-4803-ac16-b0853fa32542" containerID="7be254b1d01062cbbb762b38c7d2645a660ee23b224465bd6c51b5fe6b96ccb2" exitCode=0 Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.490463 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" event={"ID":"2b96dae6-a630-4803-ac16-b0853fa32542","Type":"ContainerDied","Data":"7be254b1d01062cbbb762b38c7d2645a660ee23b224465bd6c51b5fe6b96ccb2"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.492910 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hndrh" event={"ID":"3d7f2af1-96ec-40cd-864e-aa4b4827fdae","Type":"ContainerStarted","Data":"b54f8bd86a4c64c80ee3731f8c4fddf138ff91a4273bc6023a61df54f2d52bad"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.499076 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" event={"ID":"557228b3-f1b9-419d-94c8-e67cf23d6d7e","Type":"ContainerStarted","Data":"a9776c2bcf0d7f21e35c0a37ab70a4f6beb314c9f7c6eaac26d804f5ce313424"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.501803 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" event={"ID":"8e536769-03e0-4bc4-9947-a53d789f5b08","Type":"ContainerStarted","Data":"b1e3c03d0a037ba21c210ca9f6700490bc842ba6d9bb4bfdb3fb2d0d4dfb05dd"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.504629 4685 generic.go:334] "Generic (PLEG): container finished" podID="a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946" containerID="2ffb477c86860ef63c408c39d4f57235205c1d68dbae054d108cead55b5657e0" exitCode=0 Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.504818 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" event={"ID":"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946","Type":"ContainerDied","Data":"2ffb477c86860ef63c408c39d4f57235205c1d68dbae054d108cead55b5657e0"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.506707 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" event={"ID":"70b3bb34-0cd2-49ff-9eb0-e392c530fab1","Type":"ContainerStarted","Data":"6d80963808e5029bc6082fa9fbebea99fa6420dcb789da882c5f5267d04c5ab2"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.510057 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jmf78" 
event={"ID":"6dd08be9-187d-457a-ae06-ead4cb0527c6","Type":"ContainerStarted","Data":"29a8570111b993089c9e2f2b291f492f8d7e9dfe55b7b4e44c404718ea0df4b9"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.511539 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.511973 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.01195461 +0000 UTC m=+144.209310872 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.515877 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" event={"ID":"16a9553a-1a4c-4b95-a9c8-561696a06ce6","Type":"ContainerStarted","Data":"a9940c8acb3f705d0916918e2527a5f8af7f5444539078821b0e7c93957b3668"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.516043 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.520590 4685 patch_prober.go:28] interesting pod/console-operator-58897d9998-9cgjg container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.520672 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" podUID="16a9553a-1a4c-4b95-a9c8-561696a06ce6" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.520726 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" event={"ID":"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0","Type":"ContainerStarted","Data":"ddf89e52cf5a01b4ab988b58eacfe791c84871a8284f3f49fe2df1769340f0a2"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.522472 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" event={"ID":"e3f08bb7-dc95-4b73-b849-cee551c94ac7","Type":"ContainerStarted","Data":"ba6fa43ae4f8a9249635ae0fb90789b602b0cfcd84d5b146be0cdc9d78591863"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.524827 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-server-dpcng" event={"ID":"a6d5603e-1d93-4305-9a3e-28ee5a346a3b","Type":"ContainerStarted","Data":"0cfafbea88f590683c5f29190e31a174389c0c61ef54b48c00d47709de064cc2"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.528995 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" event={"ID":"df04c9b0-b6ea-443f-8c92-9b9bee0796ed","Type":"ContainerStarted","Data":"8a4af1e3a2e70031cda7fc0233f818cf51e9b224979725d0617d69a0849a75cd"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.531417 4685 generic.go:334] "Generic (PLEG): container finished" podID="244a8f0b-1588-493c-bc5e-20eba91a40d5" containerID="5b56ac4e3c1db5a392aa731189de3b0085f790e6de03ead07de7bef8138d0e7b" exitCode=0 Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.531481 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" event={"ID":"244a8f0b-1588-493c-bc5e-20eba91a40d5","Type":"ContainerDied","Data":"5b56ac4e3c1db5a392aa731189de3b0085f790e6de03ead07de7bef8138d0e7b"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.535148 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-87lv2" event={"ID":"6716355d-d67e-47ab-97b5-3eea8edf4128","Type":"ContainerStarted","Data":"7cfbc946480cdd086e39e44ac6ef80f1b293dad533db693545935cde376cb1e6"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.538983 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" event={"ID":"e2280720-bbcb-438f-b38e-3b24566ce570","Type":"ContainerStarted","Data":"1c5c0ef4cd80fa76be12a16cc039a3e720d37f2d1f7587b92b5ad98180b299bc"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.540894 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-cgh7q" event={"ID":"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3","Type":"ContainerStarted","Data":"721d821ee2607361c8827ff9a9f13f3c4d9de1f10695c31bc6637940d5fecbd1"} Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.613255 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.613812 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.113761529 +0000 UTC m=+144.311117801 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.614324 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.615099 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4"] Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.615542 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.115523217 +0000 UTC m=+144.312879489 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.715952 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.716848 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.216818113 +0000 UTC m=+144.414174375 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.817904 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.818341 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.318284053 +0000 UTC m=+144.515640315 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:36 crc kubenswrapper[4685]: I0129 16:28:36.919569 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:36 crc kubenswrapper[4685]: E0129 16:28:36.919916 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.419901628 +0000 UTC m=+144.617257890 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.021333 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.022123 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.522107098 +0000 UTC m=+144.719463380 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.116273 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" podStartSLOduration=123.116257651 podStartE2EDuration="2m3.116257651s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.059572544 +0000 UTC m=+144.256928816" watchObservedRunningTime="2026-01-29 16:28:37.116257651 +0000 UTC m=+144.313613913" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.124954 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.125293 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.625260555 +0000 UTC m=+144.822616877 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.134051 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rzswn"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.146232 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.152096 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.157706 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-jmf78" podStartSLOduration=123.157686933 podStartE2EDuration="2m3.157686933s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.151731592 +0000 UTC m=+144.349087864" watchObservedRunningTime="2026-01-29 16:28:37.157686933 +0000 UTC m=+144.355043195" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.157838 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5ljxg"] Jan 29 16:28:37 crc kubenswrapper[4685]: W0129 16:28:37.182278 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod092013a3_0a24_4a94_ba85_8fe88c4c50d4.slice/crio-186726ee422b1eaac50cb587951a75d3cd5c61a3346642dc4ec7a8bc8f3873ba WatchSource:0}: Error finding container 186726ee422b1eaac50cb587951a75d3cd5c61a3346642dc4ec7a8bc8f3873ba: Status 404 returned error can't find the container with id 186726ee422b1eaac50cb587951a75d3cd5c61a3346642dc4ec7a8bc8f3873ba Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.227885 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.228941 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.728920564 +0000 UTC m=+144.926276826 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.312452 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cdm4c"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.315767 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.329363 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.329754 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.829739417 +0000 UTC m=+145.027095679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: W0129 16:28:37.350458 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8c6290e_9fff_4732_a6f0_07c463ac2f00.slice/crio-4b5de08c84a5df101d583c5249539eb158f0da4ddb58746a6b6e971ed4875462 WatchSource:0}: Error finding container 4b5de08c84a5df101d583c5249539eb158f0da4ddb58746a6b6e971ed4875462: Status 404 returned error can't find the container with id 4b5de08c84a5df101d583c5249539eb158f0da4ddb58746a6b6e971ed4875462 Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.375321 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.398071 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.402996 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.408155 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.415537 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-api/machine-api-operator-5694c8668f-zbcw2"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.418638 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v"] Jan 29 16:28:37 crc kubenswrapper[4685]: W0129 16:28:37.422906 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11fd8260_e90d_4e8c_8239_63ad9e0fc627.slice/crio-dc9d168a6600d2b26eeb9b116e6b5033cc0ec720224b517eafadfd66487d1370 WatchSource:0}: Error finding container dc9d168a6600d2b26eeb9b116e6b5033cc0ec720224b517eafadfd66487d1370: Status 404 returned error can't find the container with id dc9d168a6600d2b26eeb9b116e6b5033cc0ec720224b517eafadfd66487d1370 Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.427410 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-w6clq"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.431633 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.431938 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:37.931924486 +0000 UTC m=+145.129280748 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.437649 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ctzzw"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.442297 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7mtnc"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.445151 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.447259 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.450162 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb"] Jan 29 16:28:37 crc kubenswrapper[4685]: W0129 16:28:37.467410 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode75dcf7c_3bde_4c6c_bb25_2a2bd7e4efaa.slice/crio-0056bc02563ea0d27106fd2cbab09e2dc1ff76e5a37503c550f0f53ad08521df WatchSource:0}: Error finding container 
0056bc02563ea0d27106fd2cbab09e2dc1ff76e5a37503c550f0f53ad08521df: Status 404 returned error can't find the container with id 0056bc02563ea0d27106fd2cbab09e2dc1ff76e5a37503c550f0f53ad08521df Jan 29 16:28:37 crc kubenswrapper[4685]: W0129 16:28:37.483887 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeefb747e_9461_4086_8f22_03f6730c26ed.slice/crio-29b46b06375d05e99abc0e71803ea2eddcecc44c72b66efc40a2d1f34e2f78bb WatchSource:0}: Error finding container 29b46b06375d05e99abc0e71803ea2eddcecc44c72b66efc40a2d1f34e2f78bb: Status 404 returned error can't find the container with id 29b46b06375d05e99abc0e71803ea2eddcecc44c72b66efc40a2d1f34e2f78bb Jan 29 16:28:37 crc kubenswrapper[4685]: W0129 16:28:37.490962 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d1b78bd_30ac_4538_8d20_68dbea8999b1.slice/crio-4e9e65a01258c236fe9dc7db9d629fa5f5bf8ce6a0d3ceb924341106d62d35fa WatchSource:0}: Error finding container 4e9e65a01258c236fe9dc7db9d629fa5f5bf8ce6a0d3ceb924341106d62d35fa: Status 404 returned error can't find the container with id 4e9e65a01258c236fe9dc7db9d629fa5f5bf8ce6a0d3ceb924341106d62d35fa Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.532921 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.533607 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.033582523 +0000 UTC m=+145.230938785 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.550063 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-vqfz4"] Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.551869 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-dpcng" event={"ID":"a6d5603e-1d93-4305-9a3e-28ee5a346a3b","Type":"ContainerStarted","Data":"db25f1481043ccdb0da9e6bd43f55acf70fde2a6210383a5d9d14f421dbba1ab"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.556083 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" event={"ID":"de69b975-f766-4c92-bb26-53fd18f4886c","Type":"ContainerStarted","Data":"59ef6eff0520898af1ba973dcc719d724b65e789aeec806c4dece6a24c7d8365"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.562947 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" event={"ID":"6420c1b6-8ea6-43b7-9957-5282c8b73a76","Type":"ContainerStarted","Data":"246a8ab7a345994fea19c119df0bfa0ba318630abaf66a505005b42d1a179a57"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.566167 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" event={"ID":"829122ce-0117-4939-b96b-39bf16965baa","Type":"ContainerStarted","Data":"134b90e1d89dcf53099823aad31f1e9adc08463d35b7493dd1cd5272def15fd6"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.572406 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" event={"ID":"260430db-c4ee-45e2-9f66-249c413e7e74","Type":"ContainerStarted","Data":"c0c795014e661df620df995bb5000e82ffb7cddbecf170d1fcea4a803e9f0c53"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.573460 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-dpcng" podStartSLOduration=5.573441123 podStartE2EDuration="5.573441123s" podCreationTimestamp="2026-01-29 16:28:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.570929435 +0000 UTC m=+144.768285717" watchObservedRunningTime="2026-01-29 16:28:37.573441123 +0000 UTC m=+144.770797375" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.586407 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" event={"ID":"092013a3-0a24-4a94-ba85-8fe88c4c50d4","Type":"ContainerStarted","Data":"186726ee422b1eaac50cb587951a75d3cd5c61a3346642dc4ec7a8bc8f3873ba"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.590081 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" 
event={"ID":"70b3bb34-0cd2-49ff-9eb0-e392c530fab1","Type":"ContainerStarted","Data":"37f831e57fee1e263110542bca73aba6b69caf750849a6844f26dd7c29984266"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.590065 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k5bzr" podStartSLOduration=123.590040063 podStartE2EDuration="2m3.590040063s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.585237002 +0000 UTC m=+144.782593284" watchObservedRunningTime="2026-01-29 16:28:37.590040063 +0000 UTC m=+144.787396335" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.597130 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" event={"ID":"8e536769-03e0-4bc4-9947-a53d789f5b08","Type":"ContainerStarted","Data":"5d2ade5ae8b6aad02392bb4cf89c388464be932c660f003d43dcf93fbe48328c"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.605021 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" podStartSLOduration=124.605003148 podStartE2EDuration="2m4.605003148s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.602946282 +0000 UTC m=+144.800302564" watchObservedRunningTime="2026-01-29 16:28:37.605003148 +0000 UTC m=+144.802359410" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.607659 4685 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-gn4d6 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.607765 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" podUID="23ad2357-54ad-4029-8391-285a005ce5bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.610398 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.610438 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" event={"ID":"23ad2357-54ad-4029-8391-285a005ce5bf","Type":"ContainerStarted","Data":"e8c6ddf284546a29d04b9dee272e8926ad681bbc28968208d23d5a5bfe1808ce"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.610456 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" event={"ID":"3e58813b-dc92-436d-b823-69e4988b407e","Type":"ContainerStarted","Data":"469ae4c2fb9f62a96ae0f1f7521f6d649a1f87de4048c95eb406c90d016b1015"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.611630 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" 
event={"ID":"e2280720-bbcb-438f-b38e-3b24566ce570","Type":"ContainerStarted","Data":"862e29ea49052b4624b778d4ef27ef121979d641ad586135f5216f595dfb9597"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.612527 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.614582 4685 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-94l2w container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.614674 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.618237 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" event={"ID":"244a8f0b-1588-493c-bc5e-20eba91a40d5","Type":"ContainerStarted","Data":"774de4c80adf2234917351529d4ea351987a5eb7646852e12868ec3aefa79344"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.619659 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.634785 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.641589 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.141569549 +0000 UTC m=+145.338925811 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.648139 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ngfpx" podStartSLOduration=124.648122187 podStartE2EDuration="2m4.648122187s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.625882434 +0000 UTC m=+144.823238716" watchObservedRunningTime="2026-01-29 16:28:37.648122187 +0000 UTC m=+144.845478449" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.698501 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9qg72" podStartSLOduration=123.698483872 podStartE2EDuration="2m3.698483872s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.652211398 +0000 UTC m=+144.849567670" watchObservedRunningTime="2026-01-29 16:28:37.698483872 +0000 UTC m=+144.895840134" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.699193 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" podStartSLOduration=122.699188781 podStartE2EDuration="2m2.699188781s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.697747652 +0000 UTC m=+144.895103934" watchObservedRunningTime="2026-01-29 16:28:37.699188781 +0000 UTC m=+144.896545043" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.701048 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-87lv2" event={"ID":"6716355d-d67e-47ab-97b5-3eea8edf4128","Type":"ContainerStarted","Data":"b58f21400702999ea4b05df1f7e5129a9b8e5615450c4c105969be95982a1534"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.705500 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-cgh7q" event={"ID":"ab51feaa-96af-43ee-bf2f-d48cd3ee8cb3","Type":"ContainerStarted","Data":"1e1a3851e583c0112b4ab06e9c47b7dce49830fa32d08798214725a8203029a6"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.708360 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" event={"ID":"09081f63-559c-46a8-8118-3ba2b9b9754a","Type":"ContainerStarted","Data":"7d814625d229d8d8a66e27476953205b498dd63dfc1181e3e791fda95445f5ad"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.711620 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" 
event={"ID":"a91bab5c-63b7-41f5-b761-5c50c83da5d0","Type":"ContainerStarted","Data":"9eefe28ceff6d278283fc3fa6cf0fb1abe5bd8bf4d4f65412809ff413ab73ec3"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.715635 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" event={"ID":"11fd8260-e90d-4e8c-8239-63ad9e0fc627","Type":"ContainerStarted","Data":"dc9d168a6600d2b26eeb9b116e6b5033cc0ec720224b517eafadfd66487d1370"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.726558 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" event={"ID":"e3f08bb7-dc95-4b73-b849-cee551c94ac7","Type":"ContainerStarted","Data":"46f0824694a76f3a81f43c77234c4e8df0d3d7ec3460975a9caadd073cbdf878"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.726608 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" event={"ID":"e3f08bb7-dc95-4b73-b849-cee551c94ac7","Type":"ContainerStarted","Data":"f4ee89fe87322062646efbe3e10397a0d8e5102a996c06f3e6734bb3c7faeb88"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.728057 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" event={"ID":"8bac27b6-1031-4fe6-8af0-0fd0c026940a","Type":"ContainerStarted","Data":"f59bacc45f6033103961f8c9b0ad518924ca5618a11663b517cf0e32e28d9806"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.729788 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" event={"ID":"e8aadbc9-3235-4121-b2db-19b256575d9a","Type":"ContainerStarted","Data":"05e2f50e34b0c8934059990c824000fca8b4a582808337b4f024958d399ac012"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.731497 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" event={"ID":"203a75e9-22e5-4e38-ad0f-7689e0d9805a","Type":"ContainerStarted","Data":"8c93bc1263c834607722cf6c408516340ab2a9b863bc51d16e2b9c14b909cb20"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.732381 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" event={"ID":"c8c6290e-9fff-4732-a6f0-07c463ac2f00","Type":"ContainerStarted","Data":"4b5de08c84a5df101d583c5249539eb158f0da4ddb58746a6b6e971ed4875462"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.735551 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-w6clq" event={"ID":"18a03f5f-03c2-43a4-9b6c-632fc2237398","Type":"ContainerStarted","Data":"2d660c302a81f5e5f7dff6bcc64c6dfd867597d8ec0c4db2641b73b6b45a0a33"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.736486 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.736670 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-29 16:28:38.236647106 +0000 UTC m=+145.434003368 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.737020 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.738930 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.238922468 +0000 UTC m=+145.436278730 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.747788 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" podStartSLOduration=123.747774328 podStartE2EDuration="2m3.747774328s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.721012513 +0000 UTC m=+144.918368795" watchObservedRunningTime="2026-01-29 16:28:37.747774328 +0000 UTC m=+144.945130590" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.774577 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" podStartSLOduration=123.774562984 podStartE2EDuration="2m3.774562984s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.749819214 +0000 UTC m=+144.947175486" watchObservedRunningTime="2026-01-29 16:28:37.774562984 +0000 UTC m=+144.971919246" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.774881 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5w9zx" podStartSLOduration=123.774877053 podStartE2EDuration="2m3.774877053s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.770774661 +0000 UTC 
m=+144.968130923" watchObservedRunningTime="2026-01-29 16:28:37.774877053 +0000 UTC m=+144.972233315" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.781537 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" event={"ID":"2b96dae6-a630-4803-ac16-b0853fa32542","Type":"ContainerStarted","Data":"f8a6c24cd5cefc5636ed095f87f79769573afb3c4e283fdf16685d680da406b1"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.815664 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" event={"ID":"f7693be4-39b9-45b7-94bf-d190088e7494","Type":"ContainerStarted","Data":"c447e906e213f48c41cdb0e31f9bf024771bc87329f47cb331475a4daa9e08e7"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.816231 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" event={"ID":"f7693be4-39b9-45b7-94bf-d190088e7494","Type":"ContainerStarted","Data":"2bcaac7b7c77e003cba93d56cdb8aa5f0b1af39ea5de7073ca696e2d82f283a7"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.828697 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pglkm" podStartSLOduration=123.828676241 podStartE2EDuration="2m3.828676241s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.821513856 +0000 UTC m=+145.018870128" watchObservedRunningTime="2026-01-29 16:28:37.828676241 +0000 UTC m=+145.026032503" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.829566 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-cgh7q" podStartSLOduration=5.829559305 podStartE2EDuration="5.829559305s" podCreationTimestamp="2026-01-29 16:28:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.794461844 +0000 UTC m=+144.991818116" watchObservedRunningTime="2026-01-29 16:28:37.829559305 +0000 UTC m=+145.026915577" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.831248 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" event={"ID":"6d1b78bd-30ac-4538-8d20-68dbea8999b1","Type":"ContainerStarted","Data":"4e9e65a01258c236fe9dc7db9d629fa5f5bf8ce6a0d3ceb924341106d62d35fa"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.838594 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 16:28:37.844253 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.344229353 +0000 UTC m=+145.541585615 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.845955 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-87lv2" podStartSLOduration=123.845939079 podStartE2EDuration="2m3.845939079s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.843920764 +0000 UTC m=+145.041277046" watchObservedRunningTime="2026-01-29 16:28:37.845939079 +0000 UTC m=+145.043295351" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.846779 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" event={"ID":"802aac97-996b-4fbc-8616-0ec29f5462df","Type":"ContainerStarted","Data":"04b442f720c029c31052248b741cbdb9fe23dd36a902860a3c44f45a51295528"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.847799 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.853745 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" event={"ID":"9772712a-fbc8-400e-950a-049a420c3b6e","Type":"ContainerStarted","Data":"21960f6e7ce32237a263f64f9c6faf6d717f8002288688635f1b895c92a713be"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.869845 4685 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xgmc2 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.871117 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.870584 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" event={"ID":"df04c9b0-b6ea-443f-8c92-9b9bee0796ed","Type":"ContainerStarted","Data":"10195147b71bd0086a8e3fa5aa508621cb8f32903d90c09518a23471a87058ad"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.872568 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" event={"ID":"7f1a9054-f580-42de-a694-42d90bc23064","Type":"ContainerStarted","Data":"e79f451d0cf64ab504c73ef036b84f8d6b7664788436b568e2ccaf44cc95c039"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.874145 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" event={"ID":"6ba8cfc8-5a1b-49c0-9ea8-038965d33587","Type":"ContainerStarted","Data":"5261e90c10a59647b5de579d64d079d576d686a8108451ab95704f601163ad7f"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.879787 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rhms4" podStartSLOduration=123.879754975 podStartE2EDuration="2m3.879754975s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.865510299 +0000 UTC m=+145.062866591" watchObservedRunningTime="2026-01-29 16:28:37.879754975 +0000 UTC m=+145.077111237" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.898611 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" podStartSLOduration=123.898592606 podStartE2EDuration="2m3.898592606s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.880374262 +0000 UTC m=+145.077730544" watchObservedRunningTime="2026-01-29 16:28:37.898592606 +0000 UTC m=+145.095948868" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.908803 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" event={"ID":"a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946","Type":"ContainerStarted","Data":"c6bbe0449aefb7640efe2d4037ea4b076893e379b1ff3b44f07c8640c1504056"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.915715 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" event={"ID":"eefb747e-9461-4086-8f22-03f6730c26ed","Type":"ContainerStarted","Data":"29b46b06375d05e99abc0e71803ea2eddcecc44c72b66efc40a2d1f34e2f78bb"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.919722 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" event={"ID":"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa","Type":"ContainerStarted","Data":"0056bc02563ea0d27106fd2cbab09e2dc1ff76e5a37503c550f0f53ad08521df"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.926200 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hndrh" event={"ID":"3d7f2af1-96ec-40cd-864e-aa4b4827fdae","Type":"ContainerStarted","Data":"264e6840b841de1617c6b261fa97728c6ae498a033f0211ba40e222a6d1263db"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.926295 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-hndrh" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.929464 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" event={"ID":"365fe3bd-d4b5-481b-a157-45a683747d4c","Type":"ContainerStarted","Data":"96a0b0c4d0c5ba52e682a83d26fc81252948ce031fec977a7c2da8f6bb09e867"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.929519 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" 
event={"ID":"365fe3bd-d4b5-481b-a157-45a683747d4c","Type":"ContainerStarted","Data":"1b40aeff312e32a59217e0371a7611074c183402c4b0ee01e0c2797ace86ac64"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.929599 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.930196 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" podStartSLOduration=122.930187162 podStartE2EDuration="2m2.930187162s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.928002033 +0000 UTC m=+145.125358295" watchObservedRunningTime="2026-01-29 16:28:37.930187162 +0000 UTC m=+145.127543424" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.931899 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q9qgx" podStartSLOduration=123.931892449 podStartE2EDuration="2m3.931892449s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.899926632 +0000 UTC m=+145.097282894" watchObservedRunningTime="2026-01-29 16:28:37.931892449 +0000 UTC m=+145.129248711" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.939785 4685 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-tr6jn container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.939828 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" podUID="365fe3bd-d4b5-481b-a157-45a683747d4c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.940809 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-hndrh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.940833 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hndrh" podUID="3d7f2af1-96ec-40cd-864e-aa4b4827fdae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.949817 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:37 crc kubenswrapper[4685]: E0129 
16:28:37.953334 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.453310129 +0000 UTC m=+145.650666391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.958429 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-hndrh" podStartSLOduration=123.958404317 podStartE2EDuration="2m3.958404317s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.945475387 +0000 UTC m=+145.142831649" watchObservedRunningTime="2026-01-29 16:28:37.958404317 +0000 UTC m=+145.155760579" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.967930 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" event={"ID":"557228b3-f1b9-419d-94c8-e67cf23d6d7e","Type":"ContainerStarted","Data":"e1e6337f6fa1c56b0ca35f02a8be37e444ad7961abed42cebcb206b9fe736ee7"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.967976 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" event={"ID":"557228b3-f1b9-419d-94c8-e67cf23d6d7e","Type":"ContainerStarted","Data":"66e247e6edfedcc0f60521d1dc3605849626f9d7445d2daa65f236e067693ca0"} Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.968135 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.975311 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" podStartSLOduration=122.975285254 podStartE2EDuration="2m2.975285254s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:37.972948591 +0000 UTC m=+145.170304873" watchObservedRunningTime="2026-01-29 16:28:37.975285254 +0000 UTC m=+145.172641516" Jan 29 16:28:37 crc kubenswrapper[4685]: I0129 16:28:37.980030 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" event={"ID":"4d0d94eb-79a0-4144-a9fc-35d3d97d91c0","Type":"ContainerStarted","Data":"23e05f3ffcda67e7bfaeceadb7e9a2dae74a3a82a9757f946138cf2f2b90a284"} Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.063668 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.065361 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.565324845 +0000 UTC m=+145.762681107 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.090721 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" podStartSLOduration=123.090693583 podStartE2EDuration="2m3.090693583s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:38.083924449 +0000 UTC m=+145.281280711" watchObservedRunningTime="2026-01-29 16:28:38.090693583 +0000 UTC m=+145.288049845" Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.116682 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.117980 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.118026 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.165991 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.166507 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.666483647 +0000 UTC m=+145.863839909 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.270951 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.271283 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.771266427 +0000 UTC m=+145.968622689 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.374103 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.376829 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.876782607 +0000 UTC m=+146.074138869 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.476650 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.476876 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.976842079 +0000 UTC m=+146.174198341 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.477558 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.478207 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:38.978185566 +0000 UTC m=+146.175541828 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.578340 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.578594 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.078546726 +0000 UTC m=+146.275902998 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.578781 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.579187 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.079169913 +0000 UTC m=+146.276526175 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.680083 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.680270 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.180240672 +0000 UTC m=+146.377596944 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.680353 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.680675 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.180663824 +0000 UTC m=+146.378020086 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.781640 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.781782 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.281756074 +0000 UTC m=+146.479112336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.782233 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.782701 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.28268535 +0000 UTC m=+146.480041622 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.787739 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-9cgjg" Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.819237 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-lmlp9" podStartSLOduration=124.819197629 podStartE2EDuration="2m4.819197629s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:38.149820266 +0000 UTC m=+145.347176548" watchObservedRunningTime="2026-01-29 16:28:38.819197629 +0000 UTC m=+146.016553891" Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.883661 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.884168 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.3841399 +0000 UTC m=+146.581496162 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:38 crc kubenswrapper[4685]: I0129 16:28:38.986055 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:38 crc kubenswrapper[4685]: E0129 16:28:38.986510 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.486494133 +0000 UTC m=+146.683850385 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.024827 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" event={"ID":"3e58813b-dc92-436d-b823-69e4988b407e","Type":"ContainerStarted","Data":"03b7b44be9d7cdf81419fce857e020ec43bfc68b09da62a54f7896695f1c0874"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.024898 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" event={"ID":"3e58813b-dc92-436d-b823-69e4988b407e","Type":"ContainerStarted","Data":"1b55f01e14f0a11b1b142ba35235d3c8d82def477fd3920eac495ead977d7cbe"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.028222 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-w6clq" event={"ID":"18a03f5f-03c2-43a4-9b6c-632fc2237398","Type":"ContainerStarted","Data":"f8eb7304d1947b6d7e31268ce7dee5b3179b49e37155754cd5e87cb14192844a"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.039064 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" event={"ID":"8bac27b6-1031-4fe6-8af0-0fd0c026940a","Type":"ContainerStarted","Data":"6526bd7cccbe74a092e1ddd47ffc4859fe66287ebb5a6c16ce52bbb4d556a89b"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.051089 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" event={"ID":"e75dcf7c-3bde-4c6c-bb25-2a2bd7e4efaa","Type":"ContainerStarted","Data":"8a9f9f49fe9482052d7373641982a6e2288f57da5e764537bd60ddf7abe2c07c"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.064555 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" event={"ID":"203a75e9-22e5-4e38-ad0f-7689e0d9805a","Type":"ContainerStarted","Data":"5394dd0bd9a3649f170117e0dac02628440a9b75c66888282d85c4a59a4e29d0"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.069041 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" event={"ID":"6d1b78bd-30ac-4538-8d20-68dbea8999b1","Type":"ContainerStarted","Data":"7938dea1c69c8c941421ba705797401e696d173d1f9d78231e429411677c580a"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.074038 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" event={"ID":"11fd8260-e90d-4e8c-8239-63ad9e0fc627","Type":"ContainerStarted","Data":"f4cbeeba3754dafa0e0a2b362dda8d6896bf5256e63a7e260f125f2190abeca6"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.074063 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" 
event={"ID":"11fd8260-e90d-4e8c-8239-63ad9e0fc627","Type":"ContainerStarted","Data":"505542ab84104bcfe9d93ae278c411ad6e4d98913a580ad9994163009fb72143"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.086223 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" event={"ID":"edd98a69-ec85-4975-8e33-8ab13fb284d9","Type":"ContainerStarted","Data":"736025f3e672aa201dc3c2e7e19c00e69205791ff6fd8d23983a047c5929056a"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.086259 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" event={"ID":"edd98a69-ec85-4975-8e33-8ab13fb284d9","Type":"ContainerStarted","Data":"cc5177913cfd873bd3ad2324d285fdf7fcfa358f62be142d9bc7f52805bfbebc"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.086559 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.086782 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.586731441 +0000 UTC m=+146.784087713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.087006 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.087303 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.587288656 +0000 UTC m=+146.784644918 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.093290 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" event={"ID":"7f1a9054-f580-42de-a694-42d90bc23064","Type":"ContainerStarted","Data":"a94a2aa2eccfa88df2e6482b9bdbe096f2dccdad62e07515b299203f6b4f563c"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.094466 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.096557 4685 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-7lbmb container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.096605 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" podUID="7f1a9054-f580-42de-a694-42d90bc23064" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.106307 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" event={"ID":"c8c6290e-9fff-4732-a6f0-07c463ac2f00","Type":"ContainerStarted","Data":"bd3e52e0f6b47b48c25678e196dc00e34f38e6b2a9e52e56351a607838480bda"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.116193 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" event={"ID":"de69b975-f766-4c92-bb26-53fd18f4886c","Type":"ContainerStarted","Data":"1d25db4bb9750032a7c2e0274d634396a95ca64b1e176fddbab34d6f7630733f"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.116241 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" event={"ID":"de69b975-f766-4c92-bb26-53fd18f4886c","Type":"ContainerStarted","Data":"137dff60ab75000f7557830c243f32e98785b22862e7a9eea434921e38f378f7"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.117752 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cjfmq" podStartSLOduration=124.117732031 podStartE2EDuration="2m4.117732031s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.084038888 +0000 UTC m=+146.281395160" watchObservedRunningTime="2026-01-29 16:28:39.117732031 +0000 UTC m=+146.315088293" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.118601 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-g2nd4" podStartSLOduration=125.118594234 podStartE2EDuration="2m5.118594234s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.116341814 +0000 UTC m=+146.313698096" watchObservedRunningTime="2026-01-29 16:28:39.118594234 +0000 UTC m=+146.315950496" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.119863 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.119907 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.124724 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" event={"ID":"9772712a-fbc8-400e-950a-049a420c3b6e","Type":"ContainerStarted","Data":"4d01c1a991bb066ceac60004aaa004426214d5ac691c56da3f04c7472cd9ea33"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.127722 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" event={"ID":"829122ce-0117-4939-b96b-39bf16965baa","Type":"ContainerStarted","Data":"34f6b50d49a61d5a2c7e493cb75148b0614b1d788cf92693983867d9be7aefa9"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.128346 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.130427 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" event={"ID":"eefb747e-9461-4086-8f22-03f6730c26ed","Type":"ContainerStarted","Data":"60883f6208782d56f90382b8fd8c2239a7fb593743ded7bda70a2abb3a4621c9"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.132337 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" event={"ID":"092013a3-0a24-4a94-ba85-8fe88c4c50d4","Type":"ContainerStarted","Data":"423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.132961 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.137517 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" event={"ID":"2b96dae6-a630-4803-ac16-b0853fa32542","Type":"ContainerStarted","Data":"a417353a0a5ac60cf00c7eaa8ea90c5e92aaff0e69d7393b117b46d60be8d871"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.139121 4685 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jtc28 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 
10.217.0.36:5443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.139175 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" podUID="829122ce-0117-4939-b96b-39bf16965baa" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.139522 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rzswn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.139598 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.149030 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.149854 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.158837 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8t9k2" podStartSLOduration=125.158823015 podStartE2EDuration="2m5.158823015s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.157784896 +0000 UTC m=+146.355141178" watchObservedRunningTime="2026-01-29 16:28:39.158823015 +0000 UTC m=+146.356179277" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.159276 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" event={"ID":"09081f63-559c-46a8-8118-3ba2b9b9754a","Type":"ContainerStarted","Data":"4f1f23894a2f2a4b0a30f627e16088dd472e32e0004ca8972b62a2cd0a768fa0"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.163574 4685 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-cngbb container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.163632 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" podUID="a4c55a83-d9fb-4ae1-8d1a-d18b07c2f946" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.178939 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" 
event={"ID":"6ba8cfc8-5a1b-49c0-9ea8-038965d33587","Type":"ContainerStarted","Data":"9cf9508729fc7c3a711c133fb078fab3e948c0b234ab8720445fc6039c71ef37"} Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.179976 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-hndrh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.180014 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hndrh" podUID="3d7f2af1-96ec-40cd-864e-aa4b4827fdae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.180297 4685 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-94l2w container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.180332 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.180928 4685 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-tr6jn container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.180955 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" podUID="365fe3bd-d4b5-481b-a157-45a683747d4c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.181047 4685 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-gn4d6 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.181070 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" podUID="23ad2357-54ad-4029-8391-285a005ce5bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.181138 4685 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xgmc2 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: 
I0129 16:28:39.181158 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.189286 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.190584 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.690570385 +0000 UTC m=+146.887926647 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.207540 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.207852 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.222170 4685 patch_prober.go:28] interesting pod/apiserver-76f77b778f-hlrxs container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.222236 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" podUID="2b96dae6-a630-4803-ac16-b0853fa32542" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.242645 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-b9dbg" podStartSLOduration=125.242624037 podStartE2EDuration="2m5.242624037s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.21213174 +0000 UTC m=+146.409488012" watchObservedRunningTime="2026-01-29 16:28:39.242624037 +0000 UTC m=+146.439980299" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.243053 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" podStartSLOduration=125.243049448 
podStartE2EDuration="2m5.243049448s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.241749892 +0000 UTC m=+146.439106174" watchObservedRunningTime="2026-01-29 16:28:39.243049448 +0000 UTC m=+146.440405710" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.292503 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.301187 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.801172094 +0000 UTC m=+146.998528346 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.347045 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-ctzzw" podStartSLOduration=125.347025976 podStartE2EDuration="2m5.347025976s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.281264833 +0000 UTC m=+146.478621105" watchObservedRunningTime="2026-01-29 16:28:39.347025976 +0000 UTC m=+146.544382238" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.347199 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" podStartSLOduration=125.347194301 podStartE2EDuration="2m5.347194301s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.340657704 +0000 UTC m=+146.538013976" watchObservedRunningTime="2026-01-29 16:28:39.347194301 +0000 UTC m=+146.544550563" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.366758 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" podStartSLOduration=125.366741151 podStartE2EDuration="2m5.366741151s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.36377749 +0000 UTC m=+146.561133762" watchObservedRunningTime="2026-01-29 16:28:39.366741151 +0000 UTC m=+146.564097413" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.399093 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.399419 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.899382446 +0000 UTC m=+147.096738718 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.399639 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.400062 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:39.900050904 +0000 UTC m=+147.097407166 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.430250 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" podStartSLOduration=124.430232792 podStartE2EDuration="2m4.430232792s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.429718347 +0000 UTC m=+146.627074629" watchObservedRunningTime="2026-01-29 16:28:39.430232792 +0000 UTC m=+146.627589054" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.431963 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xnmb9" podStartSLOduration=125.431954688 podStartE2EDuration="2m5.431954688s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.398573403 +0000 UTC m=+146.595929665" watchObservedRunningTime="2026-01-29 16:28:39.431954688 +0000 UTC m=+146.629310960" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.473840 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-vqfz4" podStartSLOduration=124.473811713 podStartE2EDuration="2m4.473811713s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.452426583 +0000 UTC m=+146.649782855" watchObservedRunningTime="2026-01-29 16:28:39.473811713 +0000 UTC m=+146.671167975" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.499152 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" podStartSLOduration=125.499130849 podStartE2EDuration="2m5.499130849s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.475749925 +0000 UTC m=+146.673106217" watchObservedRunningTime="2026-01-29 16:28:39.499130849 +0000 UTC m=+146.696487111" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.502759 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.503121 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-29 16:28:40.003104697 +0000 UTC m=+147.200460969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.531259 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" podStartSLOduration=124.53123739 podStartE2EDuration="2m4.53123739s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.502058438 +0000 UTC m=+146.699414710" watchObservedRunningTime="2026-01-29 16:28:39.53123739 +0000 UTC m=+146.728593652" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.531568 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-t8jrg" podStartSLOduration=125.531562468 podStartE2EDuration="2m5.531562468s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:39.529533593 +0000 UTC m=+146.726889855" watchObservedRunningTime="2026-01-29 16:28:39.531562468 +0000 UTC m=+146.728918730" Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.604585 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.605149 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.105137912 +0000 UTC m=+147.302494164 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.706223 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.706630 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.206610972 +0000 UTC m=+147.403967234 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.807859 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.808283 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.308263669 +0000 UTC m=+147.505620021 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.909113 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.909318 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.409297557 +0000 UTC m=+147.606653819 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:39 crc kubenswrapper[4685]: I0129 16:28:39.909493 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:39 crc kubenswrapper[4685]: E0129 16:28:39.909905 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.409857172 +0000 UTC m=+147.607213504 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.011208 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.011858 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.511839966 +0000 UTC m=+147.709196238 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.112726 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.113062 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.613050729 +0000 UTC m=+147.810406991 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.119505 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:40 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:40 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:40 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.119538 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.137814 4685 csr.go:261] certificate signing request csr-br6vr is approved, waiting to be issued Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.150498 4685 csr.go:257] certificate signing request csr-br6vr is issued Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.183526 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf6h6" event={"ID":"09081f63-559c-46a8-8118-3ba2b9b9754a","Type":"ContainerStarted","Data":"d0d2cc086c73c492a8c8272693a6227b10133ec7973121570c861f73345c8ec5"} Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.185510 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" event={"ID":"c8c6290e-9fff-4732-a6f0-07c463ac2f00","Type":"ContainerStarted","Data":"d651969bb04847059d4c1811d7d921a2a22b35f2921de329dcd65de68f01dd99"} Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.187269 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-w6clq" event={"ID":"18a03f5f-03c2-43a4-9b6c-632fc2237398","Type":"ContainerStarted","Data":"ec049514b72c3d391dfc21dfa4014122ad1e8312d19dc3e41bbb4cff662ff9f8"} Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.187649 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.189287 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zbcw2" event={"ID":"eefb747e-9461-4086-8f22-03f6730c26ed","Type":"ContainerStarted","Data":"690b30915f0614a958c7ad698edd2578cef0410d211e52bdc6f1082dfd4a4c93"} Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.191749 4685 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jtc28 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" start-of-body= Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.191778 4685 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" podUID="829122ce-0117-4939-b96b-39bf16965baa" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.192325 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" event={"ID":"8bac27b6-1031-4fe6-8af0-0fd0c026940a","Type":"ContainerStarted","Data":"3cd2fc37776a882fab11167094c867afbbf9c2d7514efc1bbc9748824707857d"} Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.193507 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rzswn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.193534 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.193790 4685 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-7lbmb container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.193840 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" podUID="7f1a9054-f580-42de-a694-42d90bc23064" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.193929 4685 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xgmc2 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.193963 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.213753 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.214189 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-29 16:28:40.714173261 +0000 UTC m=+147.911529523 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.217801 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-cdm4c" podStartSLOduration=125.217780738 podStartE2EDuration="2m5.217780738s" podCreationTimestamp="2026-01-29 16:26:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:40.215449355 +0000 UTC m=+147.412805647" watchObservedRunningTime="2026-01-29 16:28:40.217780738 +0000 UTC m=+147.415137000" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.274807 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-w6clq" podStartSLOduration=8.274780923 podStartE2EDuration="8.274780923s" podCreationTimestamp="2026-01-29 16:28:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:40.271160335 +0000 UTC m=+147.468516617" watchObservedRunningTime="2026-01-29 16:28:40.274780923 +0000 UTC m=+147.472137185" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.286511 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-5ljxg" podStartSLOduration=126.28649035 podStartE2EDuration="2m6.28649035s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:40.239928099 +0000 UTC m=+147.437284391" watchObservedRunningTime="2026-01-29 16:28:40.28649035 +0000 UTC m=+147.483846612" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.318082 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.322119 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.822085055 +0000 UTC m=+148.019441317 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.419617 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.419816 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.919785804 +0000 UTC m=+148.117142066 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.420029 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.420347 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:40.920340119 +0000 UTC m=+148.117696381 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.424492 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-v2t2v" podStartSLOduration=126.424470991 podStartE2EDuration="2m6.424470991s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:40.421594593 +0000 UTC m=+147.618950865" watchObservedRunningTime="2026-01-29 16:28:40.424470991 +0000 UTC m=+147.621827253" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.519226 4685 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-hb7f9 container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.519261 4685 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-hb7f9 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.519273 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" podUID="244a8f0b-1588-493c-bc5e-20eba91a40d5" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.519302 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" podUID="244a8f0b-1588-493c-bc5e-20eba91a40d5" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.521347 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.521664 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.021650914 +0000 UTC m=+148.219007176 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.622624 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.622934 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.122922879 +0000 UTC m=+148.320279141 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.723719 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.723869 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.223844165 +0000 UTC m=+148.421200427 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.723924 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.724224 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.224212266 +0000 UTC m=+148.421568528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.825658 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.825859 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.32583221 +0000 UTC m=+148.523188472 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.826155 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.826537 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.326527968 +0000 UTC m=+148.523884230 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:40 crc kubenswrapper[4685]: I0129 16:28:40.927471 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:40 crc kubenswrapper[4685]: E0129 16:28:40.927805 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.427791203 +0000 UTC m=+148.625147465 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.029187 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8"
Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.029538 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.529525411 +0000 UTC m=+148.726881673 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.119725 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 29 16:28:41 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld
Jan 29 16:28:41 crc kubenswrapper[4685]: [+]process-running ok
Jan 29 16:28:41 crc kubenswrapper[4685]: healthz check failed
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.119787 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.130367 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.130892 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.630873298 +0000 UTC m=+148.828229570 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.156780 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-29 16:23:40 +0000 UTC, rotation deadline is 2026-12-09 21:17:20.770000448 +0000 UTC
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.156830 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7540h48m39.613174077s for next certificate rotation
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.198634 4685 generic.go:334] "Generic (PLEG): container finished" podID="260430db-c4ee-45e2-9f66-249c413e7e74" containerID="c0c795014e661df620df995bb5000e82ffb7cddbecf170d1fcea4a803e9f0c53" exitCode=0
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.198722 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" event={"ID":"260430db-c4ee-45e2-9f66-249c413e7e74","Type":"ContainerDied","Data":"c0c795014e661df620df995bb5000e82ffb7cddbecf170d1fcea4a803e9f0c53"}
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.201608 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" event={"ID":"e8aadbc9-3235-4121-b2db-19b256575d9a","Type":"ContainerStarted","Data":"5256bb9c6f446d3383d50928d310db7fd62d8fe9a6e44fa03c3878fb5a2406fb"}
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.206203 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rzswn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.206274 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused"
Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.232642 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8"
Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.233061 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.733039217 +0000 UTC m=+148.930395489 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.235950 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7lbmb" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.333374 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.334466 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.834450635 +0000 UTC m=+149.031806887 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.435025 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.435374 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:41.935361461 +0000 UTC m=+149.132717723 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.536009 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.536203 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.036174383 +0000 UTC m=+149.233530655 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.536473 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.536783 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.03677134 +0000 UTC m=+149.234127602 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.637285 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.637478 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.137453849 +0000 UTC m=+149.334810111 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.637535 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.637571 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.637592 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.637639 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.637669 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.638891 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.138883988 +0000 UTC m=+149.336240250 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.639080 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.642870 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.643000 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.657317 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.738962 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.739175 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" 
failed. No retries permitted until 2026-01-29 16:28:42.239147105 +0000 UTC m=+149.436503367 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.739360 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.739676 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.239668149 +0000 UTC m=+149.437024411 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.840277 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.840414 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.340377789 +0000 UTC m=+149.537734051 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.840729 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.841478 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.341446778 +0000 UTC m=+149.538803210 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.924846 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.939886 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.942695 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.942873 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.442845347 +0000 UTC m=+149.640201619 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.943093 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:41 crc kubenswrapper[4685]: E0129 16:28:41.943499 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.443488784 +0000 UTC m=+149.640845256 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:41 crc kubenswrapper[4685]: I0129 16:28:41.952781 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.000833 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pk7q7"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.003149 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.008178 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pk7q7"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.024294 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.046873 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.047038 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.54700005 +0000 UTC m=+149.744356312 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.047369 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-utilities\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.047514 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjpfl\" (UniqueName: \"kubernetes.io/projected/e14e2172-5fc7-4b65-90fe-4060e571eee5-kube-api-access-sjpfl\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.047613 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.048551 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.548542891 +0000 UTC m=+149.745899153 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.048821 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-catalog-content\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.129590 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:42 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:42 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:42 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.129636 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.152888 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.153075 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-utilities\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.153111 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjpfl\" (UniqueName: \"kubernetes.io/projected/e14e2172-5fc7-4b65-90fe-4060e571eee5-kube-api-access-sjpfl\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.153179 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-catalog-content\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.153593 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-catalog-content\") pod 
\"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.153851 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-utilities\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.153880 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.653866637 +0000 UTC m=+149.851222899 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.199269 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjpfl\" (UniqueName: \"kubernetes.io/projected/e14e2172-5fc7-4b65-90fe-4060e571eee5-kube-api-access-sjpfl\") pod \"certified-operators-pk7q7\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.202097 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7x5cm"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.209924 4685 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jtc28 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.209972 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" podUID="829122ce-0117-4939-b96b-39bf16965baa" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.223709 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7x5cm"
Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.228017 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.256094 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-catalog-content\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm"
Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.256169 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8"
Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.256201 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxbbn\" (UniqueName: \"kubernetes.io/projected/48e58904-04a7-4e69-91a4-61118022ec0b-kube-api-access-zxbbn\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm"
Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.256243 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-utilities\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm"
Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.256806 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.756794727 +0000 UTC m=+149.954150979 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.259597 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7x5cm"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.358985 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.359334 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-utilities\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.359472 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-catalog-content\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.359568 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxbbn\" (UniqueName: \"kubernetes.io/projected/48e58904-04a7-4e69-91a4-61118022ec0b-kube-api-access-zxbbn\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.361696 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-utilities\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.361804 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.861783062 +0000 UTC m=+150.059139324 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.362109 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-catalog-content\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.380920 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.394539 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k8cws"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.395753 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.412271 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k8cws"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.433258 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxbbn\" (UniqueName: \"kubernetes.io/projected/48e58904-04a7-4e69-91a4-61118022ec0b-kube-api-access-zxbbn\") pod \"community-operators-7x5cm\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.482649 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-utilities\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.482698 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.482716 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-catalog-content\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.482759 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg486\" (UniqueName: \"kubernetes.io/projected/13f56e43-a938-4682-9b5a-14fe7af129e5-kube-api-access-vg486\") pod 
\"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.482783 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.482851 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.484720 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:42.984702184 +0000 UTC m=+150.182058436 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.488064 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28e2de4c-c48c-4160-87df-b5a8d213a203-metrics-certs\") pod \"network-metrics-daemon-rpctp\" (UID: \"28e2de4c-c48c-4160-87df-b5a8d213a203\") " pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.592198 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.592603 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg486\" (UniqueName: \"kubernetes.io/projected/13f56e43-a938-4682-9b5a-14fe7af129e5-kube-api-access-vg486\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.592681 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-utilities\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.592713 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-catalog-content\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 
16:28:42.593563 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.093541155 +0000 UTC m=+150.290897417 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.594326 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-utilities\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.594604 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-catalog-content\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.598803 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8h7mh"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.610496 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8h7mh"] Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.610617 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.631654 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpctp" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.654551 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg486\" (UniqueName: \"kubernetes.io/projected/13f56e43-a938-4682-9b5a-14fe7af129e5-kube-api-access-vg486\") pod \"certified-operators-k8cws\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.695027 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkfdq\" (UniqueName: \"kubernetes.io/projected/955e6277-1f7d-4d59-8987-3618ed745fc5-kube-api-access-wkfdq\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.695413 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-catalog-content\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.695466 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.695524 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-utilities\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.696002 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.195990781 +0000 UTC m=+150.393347043 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.796808 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.797104 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-utilities\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.797137 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkfdq\" (UniqueName: \"kubernetes.io/projected/955e6277-1f7d-4d59-8987-3618ed745fc5-kube-api-access-wkfdq\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.797159 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-catalog-content\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.797606 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-catalog-content\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.797671 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.297657167 +0000 UTC m=+150.495013429 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.797889 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-utilities\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.820383 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.848815 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkfdq\" (UniqueName: \"kubernetes.io/projected/955e6277-1f7d-4d59-8987-3618ed745fc5-kube-api-access-wkfdq\") pod \"community-operators-8h7mh\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.882503 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:42 crc kubenswrapper[4685]: I0129 16:28:42.900831 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:42 crc kubenswrapper[4685]: E0129 16:28:42.901128 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.401115531 +0000 UTC m=+150.598471783 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:42 crc kubenswrapper[4685]: W0129 16:28:42.947457 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-dab61ffe4a68bef865edd773cf1b59c8034e5c47c2d3325bfda6f2f4edd296bb WatchSource:0}: Error finding container dab61ffe4a68bef865edd773cf1b59c8034e5c47c2d3325bfda6f2f4edd296bb: Status 404 returned error can't find the container with id dab61ffe4a68bef865edd773cf1b59c8034e5c47c2d3325bfda6f2f4edd296bb Jan 29 16:28:42 crc kubenswrapper[4685]: W0129 16:28:42.994481 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-3078b713aa4b843bb79d7938c4b919bf3edecc7f93a90177e8a0d06c63782b72 WatchSource:0}: Error finding container 3078b713aa4b843bb79d7938c4b919bf3edecc7f93a90177e8a0d06c63782b72: Status 404 returned error can't find the container with id 3078b713aa4b843bb79d7938c4b919bf3edecc7f93a90177e8a0d06c63782b72 Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.008717 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h27nz\" (UniqueName: \"kubernetes.io/projected/260430db-c4ee-45e2-9f66-249c413e7e74-kube-api-access-h27nz\") pod \"260430db-c4ee-45e2-9f66-249c413e7e74\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.008799 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.008883 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.008975 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/260430db-c4ee-45e2-9f66-249c413e7e74-secret-volume\") pod \"260430db-c4ee-45e2-9f66-249c413e7e74\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.009043 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/260430db-c4ee-45e2-9f66-249c413e7e74-config-volume\") pod \"260430db-c4ee-45e2-9f66-249c413e7e74\" (UID: \"260430db-c4ee-45e2-9f66-249c413e7e74\") " Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.009288 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-29 16:28:43.509270482 +0000 UTC m=+150.706626744 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.009851 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/260430db-c4ee-45e2-9f66-249c413e7e74-config-volume" (OuterVolumeSpecName: "config-volume") pod "260430db-c4ee-45e2-9f66-249c413e7e74" (UID: "260430db-c4ee-45e2-9f66-249c413e7e74"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.044640 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/260430db-c4ee-45e2-9f66-249c413e7e74-kube-api-access-h27nz" (OuterVolumeSpecName: "kube-api-access-h27nz") pod "260430db-c4ee-45e2-9f66-249c413e7e74" (UID: "260430db-c4ee-45e2-9f66-249c413e7e74"). InnerVolumeSpecName "kube-api-access-h27nz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.045757 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/260430db-c4ee-45e2-9f66-249c413e7e74-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "260430db-c4ee-45e2-9f66-249c413e7e74" (UID: "260430db-c4ee-45e2-9f66-249c413e7e74"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.111891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.111989 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/260430db-c4ee-45e2-9f66-249c413e7e74-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.112006 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/260430db-c4ee-45e2-9f66-249c413e7e74-config-volume\") on node \"crc\" DevicePath \"\"" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.112018 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h27nz\" (UniqueName: \"kubernetes.io/projected/260430db-c4ee-45e2-9f66-249c413e7e74-kube-api-access-h27nz\") on node \"crc\" DevicePath \"\"" Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.112327 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.612312686 +0000 UTC m=+150.809668958 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.147127 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:43 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:43 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:43 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.147178 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.213424 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.213767 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.713741675 +0000 UTC m=+150.911097937 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.213959 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.214417 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.714405343 +0000 UTC m=+150.911761605 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.276200 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3078b713aa4b843bb79d7938c4b919bf3edecc7f93a90177e8a0d06c63782b72"} Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.297683 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7e4c5bcbf44b4daaa3875e9b33f50cac15aeedfa77b2be73d1e7473189f6bcd9"} Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.297741 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"067fb1ae396d2960fb4ac577fb27d80a5a61a34e647533b0f34cd546851b3c2d"} Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.298654 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.317428 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.317566 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"dab61ffe4a68bef865edd773cf1b59c8034e5c47c2d3325bfda6f2f4edd296bb"} Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.317872 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.817851147 +0000 UTC m=+151.015207409 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.340887 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" event={"ID":"e8aadbc9-3235-4121-b2db-19b256575d9a","Type":"ContainerStarted","Data":"64236f0c41a945816ad58440a0ef5cb8f1acd6ceaf68a43eca7f8f4fb7a8b740"} Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.342912 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" event={"ID":"260430db-c4ee-45e2-9f66-249c413e7e74","Type":"ContainerDied","Data":"4c246fac55fd74a0b4cc47daa1b4e9f534ac34ee3c865f2ab6c2c742cf461773"} Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.342932 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c246fac55fd74a0b4cc47daa1b4e9f534ac34ee3c865f2ab6c2c742cf461773" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.343015 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495055-gwglr" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.351687 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pk7q7"] Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.378671 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7x5cm"] Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.401583 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rpctp"] Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.424031 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.425164 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:43.925149086 +0000 UTC m=+151.122505418 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.525855 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.526260 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:44.026243176 +0000 UTC m=+151.223599438 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.532515 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hb7f9" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.628923 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.629612 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-29 16:28:44.129597457 +0000 UTC m=+151.326953719 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbgv8" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.649637 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8h7mh"] Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.686895 4685 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 29 16:28:43 crc kubenswrapper[4685]: W0129 16:28:43.700647 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod955e6277_1f7d_4d59_8987_3618ed745fc5.slice/crio-a26079c8b8eb59c9af480fab196ead843d2b2f82190ab1c48c2ce1c7d5f28737 WatchSource:0}: Error finding container a26079c8b8eb59c9af480fab196ead843d2b2f82190ab1c48c2ce1c7d5f28737: Status 404 returned error can't find the container with id a26079c8b8eb59c9af480fab196ead843d2b2f82190ab1c48c2ce1c7d5f28737 Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.752999 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:43 crc kubenswrapper[4685]: E0129 16:28:43.755028 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-29 16:28:44.254840221 +0000 UTC m=+151.452196493 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.788161 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k8cws"] Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.791107 4685 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-29T16:28:43.686920561Z","Handler":null,"Name":""} Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.852722 4685 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.852792 4685 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.857145 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.869471 4685 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.869511 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.934867 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbgv8\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.959835 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 29 16:28:43 crc kubenswrapper[4685]: I0129 16:28:43.986560 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.066888 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.125781 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:44 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:44 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:44 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.126228 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.171653 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.185524 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ksvjv"] Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.185765 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="260430db-c4ee-45e2-9f66-249c413e7e74" containerName="collect-profiles" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.185784 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="260430db-c4ee-45e2-9f66-249c413e7e74" containerName="collect-profiles" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.185912 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="260430db-c4ee-45e2-9f66-249c413e7e74" containerName="collect-profiles" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.186756 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.189200 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.191425 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cngbb" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.220127 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ksvjv"] Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.227557 4685 patch_prober.go:28] interesting pod/apiserver-76f77b778f-hlrxs container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]log ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]etcd ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/generic-apiserver-start-informers ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/max-in-flight-filter ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 29 16:28:44 crc kubenswrapper[4685]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 29 16:28:44 crc kubenswrapper[4685]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/project.openshift.io-projectcache ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/openshift.io-startinformers ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 29 16:28:44 crc kubenswrapper[4685]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 29 16:28:44 crc kubenswrapper[4685]: livez check failed Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.227644 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" podUID="2b96dae6-a630-4803-ac16-b0853fa32542" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.260246 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.261294 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.267978 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.268223 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.274465 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-utilities\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.274524 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ztg7\" (UniqueName: \"kubernetes.io/projected/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-kube-api-access-2ztg7\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.274652 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-catalog-content\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.286851 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.381031 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.381130 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-utilities\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.381166 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ztg7\" (UniqueName: \"kubernetes.io/projected/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-kube-api-access-2ztg7\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.385331 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-utilities\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.385807 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.385893 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-catalog-content\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.387072 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4f83ade90b9e05eeb832a4efc6a68d8d034e2a8877173d36986d3c99f8244bcf"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.387297 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-catalog-content\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.418073 4685 generic.go:334] "Generic (PLEG): container finished" podID="13f56e43-a938-4682-9b5a-14fe7af129e5" containerID="64c771286458a3fa211c10ce555e549d2189c2e625876360788f9251509191d2" exitCode=0 Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.418607 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k8cws" event={"ID":"13f56e43-a938-4682-9b5a-14fe7af129e5","Type":"ContainerDied","Data":"64c771286458a3fa211c10ce555e549d2189c2e625876360788f9251509191d2"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.418664 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k8cws" event={"ID":"13f56e43-a938-4682-9b5a-14fe7af129e5","Type":"ContainerStarted","Data":"a6a63eb72f4b9c4d9699034de7344a615080163c3ae087337a966ae03d332b26"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.427309 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.436537 4685 generic.go:334] "Generic (PLEG): container finished" podID="48e58904-04a7-4e69-91a4-61118022ec0b" containerID="b9cc69678fef537eb8c11fb60556f9a43b340e59e0cfe000bbda94391080b569" exitCode=0 Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.437683 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x5cm" event={"ID":"48e58904-04a7-4e69-91a4-61118022ec0b","Type":"ContainerDied","Data":"b9cc69678fef537eb8c11fb60556f9a43b340e59e0cfe000bbda94391080b569"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.437918 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x5cm" event={"ID":"48e58904-04a7-4e69-91a4-61118022ec0b","Type":"ContainerStarted","Data":"9c829c04d04c27933b8d5b702b77a17e77c7eb516678f83b43b6c3e53b33ee3a"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.458510 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2ztg7\" (UniqueName: \"kubernetes.io/projected/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-kube-api-access-2ztg7\") pod \"redhat-marketplace-ksvjv\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.460457 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1e75f6ccc23b8958ba4cc7f077587392d5435802ee3865db19ea7561733b64a2"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.481718 4685 generic.go:334] "Generic (PLEG): container finished" podID="955e6277-1f7d-4d59-8987-3618ed745fc5" containerID="79f472e380b2c61b0cbeb9474c602b9e0b68d5179b216d5497dfa8e3c2cad730" exitCode=0 Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.481814 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h7mh" event={"ID":"955e6277-1f7d-4d59-8987-3618ed745fc5","Type":"ContainerDied","Data":"79f472e380b2c61b0cbeb9474c602b9e0b68d5179b216d5497dfa8e3c2cad730"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.481840 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h7mh" event={"ID":"955e6277-1f7d-4d59-8987-3618ed745fc5","Type":"ContainerStarted","Data":"a26079c8b8eb59c9af480fab196ead843d2b2f82190ab1c48c2ce1c7d5f28737"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.484806 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbgv8"] Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.489306 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.489433 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.489501 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.502972 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rpctp" event={"ID":"28e2de4c-c48c-4160-87df-b5a8d213a203","Type":"ContainerStarted","Data":"601a083ba3b90301042e71118692da041183dbf7855e49018c3d6d7fef21d982"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.503013 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rpctp" event={"ID":"28e2de4c-c48c-4160-87df-b5a8d213a203","Type":"ContainerStarted","Data":"1439cb65d7edbee29678f370bb210b750a0f8e5620b526f4b571f2683caf491f"} 
Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.513906 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.519036 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.522731 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" event={"ID":"e8aadbc9-3235-4121-b2db-19b256575d9a","Type":"ContainerStarted","Data":"499b898bcffc835af3165356f1bf2b39fb0505e5fd70f2358db49b3a208166ef"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.522780 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" event={"ID":"e8aadbc9-3235-4121-b2db-19b256575d9a","Type":"ContainerStarted","Data":"bf37c74d18f44a0a19386a18d37272140553815d0dcde2d45c98270928d32e0f"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.528868 4685 generic.go:334] "Generic (PLEG): container finished" podID="e14e2172-5fc7-4b65-90fe-4060e571eee5" containerID="d88631ecd1bcacf8b695151181838f76e7ebf03c85636dd05d25df8c50ac1bf8" exitCode=0 Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.529553 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk7q7" event={"ID":"e14e2172-5fc7-4b65-90fe-4060e571eee5","Type":"ContainerDied","Data":"d88631ecd1bcacf8b695151181838f76e7ebf03c85636dd05d25df8c50ac1bf8"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.529585 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk7q7" event={"ID":"e14e2172-5fc7-4b65-90fe-4060e571eee5","Type":"ContainerStarted","Data":"af7738b393cbdbde2984b9864ed267a23707653e69bc97edbacb45a389d57b6b"} Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.559282 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-7mtnc" podStartSLOduration=12.559257996 podStartE2EDuration="12.559257996s" podCreationTimestamp="2026-01-29 16:28:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:44.558048993 +0000 UTC m=+151.755405245" watchObservedRunningTime="2026-01-29 16:28:44.559257996 +0000 UTC m=+151.756614258" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.575724 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.602777 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b7nt8"] Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.604527 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.605098 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.633054 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.633187 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zxbbn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7x5cm_openshift-marketplace(48e58904-04a7-4e69-91a4-61118022ec0b): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.634141 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7nt8"] Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.634465 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.637532 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.637755 4685 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vg486,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-k8cws_openshift-marketplace(13f56e43-a938-4682-9b5a-14fe7af129e5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.639706 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.650759 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.651279 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkfdq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-8h7mh_openshift-marketplace(955e6277-1f7d-4d59-8987-3618ed745fc5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.653906 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.670005 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.670355 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.675592 4685 patch_prober.go:28] interesting pod/console-f9d7485db-jmf78 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.27:8443/health\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.675655 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-jmf78" podUID="6dd08be9-187d-457a-ae06-ead4cb0527c6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.27:8443/health\": dial tcp 10.217.0.27:8443: connect: connection refused" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.717689 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:28:44 crc 
kubenswrapper[4685]: E0129 16:28:44.717814 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sjpfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pk7q7_openshift-marketplace(e14e2172-5fc7-4b65-90fe-4060e571eee5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:44 crc kubenswrapper[4685]: E0129 16:28:44.720744 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.748932 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.762105 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-hndrh container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.762213 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-hndrh" podUID="3d7f2af1-96ec-40cd-864e-aa4b4827fdae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.780432 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-hndrh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get 
\"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.780537 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hndrh" podUID="3d7f2af1-96ec-40cd-864e-aa4b4827fdae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.794982 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-catalog-content\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.795051 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-utilities\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.795099 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl2j7\" (UniqueName: \"kubernetes.io/projected/907873d4-fae4-4bc1-b7ff-a56b329f47a8-kube-api-access-nl2j7\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.824998 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.896682 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-catalog-content\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.896755 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-utilities\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.896791 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl2j7\" (UniqueName: \"kubernetes.io/projected/907873d4-fae4-4bc1-b7ff-a56b329f47a8-kube-api-access-nl2j7\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.898291 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-catalog-content\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 
16:28:44.899036 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-utilities\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:44 crc kubenswrapper[4685]: I0129 16:28:44.941284 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl2j7\" (UniqueName: \"kubernetes.io/projected/907873d4-fae4-4bc1-b7ff-a56b329f47a8-kube-api-access-nl2j7\") pod \"redhat-marketplace-b7nt8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.096950 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ksvjv"] Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.118360 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.122374 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:45 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:45 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:45 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.122443 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.145344 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.184110 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9wff8"] Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.185940 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.191799 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9wff8"] Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.195996 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.241051 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.307786 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-catalog-content\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.307926 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-utilities\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.307956 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljn4n\" (UniqueName: \"kubernetes.io/projected/c9eff178-9b35-42e8-a94d-ecb975269482-kube-api-access-ljn4n\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.409301 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-utilities\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.409347 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljn4n\" (UniqueName: \"kubernetes.io/projected/c9eff178-9b35-42e8-a94d-ecb975269482-kube-api-access-ljn4n\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.409403 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-catalog-content\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.409872 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-catalog-content\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.410213 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-utilities\") pod \"redhat-operators-9wff8\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.445980 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljn4n\" (UniqueName: \"kubernetes.io/projected/c9eff178-9b35-42e8-a94d-ecb975269482-kube-api-access-ljn4n\") pod \"redhat-operators-9wff8\" (UID: 
\"c9eff178-9b35-42e8-a94d-ecb975269482\") " pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.475326 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7nt8"] Jan 29 16:28:45 crc kubenswrapper[4685]: W0129 16:28:45.489238 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod907873d4_fae4_4bc1_b7ff_a56b329f47a8.slice/crio-a5bf33f00fe6d3b9ecacf3cff6185d6768cc04fde0f0f99fe0b135d3a00e19cd WatchSource:0}: Error finding container a5bf33f00fe6d3b9ecacf3cff6185d6768cc04fde0f0f99fe0b135d3a00e19cd: Status 404 returned error can't find the container with id a5bf33f00fe6d3b9ecacf3cff6185d6768cc04fde0f0f99fe0b135d3a00e19cd Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.500732 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.506416 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.552080 4685 generic.go:334] "Generic (PLEG): container finished" podID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" containerID="978432f84e5e6d28be99ef61d536090b8e94fecd4d2493d9727b9462bab2a5c7" exitCode=0 Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.552158 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ksvjv" event={"ID":"b1edc1c3-3a71-4117-bf81-b3e95b8d2230","Type":"ContainerDied","Data":"978432f84e5e6d28be99ef61d536090b8e94fecd4d2493d9727b9462bab2a5c7"} Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.552188 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ksvjv" event={"ID":"b1edc1c3-3a71-4117-bf81-b3e95b8d2230","Type":"ContainerStarted","Data":"250a1f204f818509a329d6e7842ef2c17a3f85a55d7d1357b4bad70d09fb86fa"} Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.558827 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tr6jn" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.563662 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rpctp" event={"ID":"28e2de4c-c48c-4160-87df-b5a8d213a203","Type":"ContainerStarted","Data":"b768bc532033be1c8e9c8ea8ca658818a971507bd7e6b993b55fab5f90f344f3"} Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.569515 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" event={"ID":"14cfc19d-44fb-45de-9abc-df6d268d7cb2","Type":"ContainerStarted","Data":"93e19e1ec631d37ad1f2cd75043991dfe2bfc5c93bcaf5da432272bebe61873f"} Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.569878 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" event={"ID":"14cfc19d-44fb-45de-9abc-df6d268d7cb2","Type":"ContainerStarted","Data":"3e0dc9c8866a24fcd4c4c546a474258e25598ca4a6feda90fa929bee9dbfccb1"} Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.570240 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 
16:28:45.572067 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5","Type":"ContainerStarted","Data":"5df83e3758e1eaa2fe5a1f2b741bfd2f15e6c46c22b346d8562b2b58f17c8655"} Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.577314 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7nt8" event={"ID":"907873d4-fae4-4bc1-b7ff-a56b329f47a8","Type":"ContainerStarted","Data":"a5bf33f00fe6d3b9ecacf3cff6185d6768cc04fde0f0f99fe0b135d3a00e19cd"} Jan 29 16:28:45 crc kubenswrapper[4685]: E0129 16:28:45.579213 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:28:45 crc kubenswrapper[4685]: E0129 16:28:45.579245 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:28:45 crc kubenswrapper[4685]: E0129 16:28:45.579304 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:28:45 crc kubenswrapper[4685]: E0129 16:28:45.579317 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.586627 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jtc28" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.601241 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sjjwk"] Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.602300 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.627931 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-rpctp" podStartSLOduration=132.627913292 podStartE2EDuration="2m12.627913292s" podCreationTimestamp="2026-01-29 16:26:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:45.623191224 +0000 UTC m=+152.820547486" watchObservedRunningTime="2026-01-29 16:28:45.627913292 +0000 UTC m=+152.825269554" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.641376 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.642999 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sjjwk"] Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.656312 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" podStartSLOduration=131.656287891 podStartE2EDuration="2m11.656287891s" podCreationTimestamp="2026-01-29 16:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:45.649318332 +0000 UTC m=+152.846674604" watchObservedRunningTime="2026-01-29 16:28:45.656287891 +0000 UTC m=+152.853644173" Jan 29 16:28:45 crc kubenswrapper[4685]: E0129 16:28:45.692112 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:28:45 crc kubenswrapper[4685]: E0129 16:28:45.692424 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2ztg7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ksvjv_openshift-marketplace(b1edc1c3-3a71-4117-bf81-b3e95b8d2230): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:45 crc kubenswrapper[4685]: E0129 16:28:45.694319 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.713506 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-catalog-content\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.714862 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm2t8\" (UniqueName: \"kubernetes.io/projected/bcc954ee-61b9-492c-b375-2f271c18201c-kube-api-access-cm2t8\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.714980 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-utilities\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.821634 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm2t8\" (UniqueName: 
\"kubernetes.io/projected/bcc954ee-61b9-492c-b375-2f271c18201c-kube-api-access-cm2t8\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.822039 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-utilities\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.822121 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-catalog-content\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.822750 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-catalog-content\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.823316 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-utilities\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.854730 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm2t8\" (UniqueName: \"kubernetes.io/projected/bcc954ee-61b9-492c-b375-2f271c18201c-kube-api-access-cm2t8\") pod \"redhat-operators-sjjwk\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.911076 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9wff8"] Jan 29 16:28:45 crc kubenswrapper[4685]: I0129 16:28:45.939111 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.119592 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:46 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:46 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:46 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.119661 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.354343 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sjjwk"] Jan 29 16:28:46 crc kubenswrapper[4685]: W0129 16:28:46.363014 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcc954ee_61b9_492c_b375_2f271c18201c.slice/crio-f0d04f478f2c70eb3a821485d87e70c20c99858634e94c926dc4b3884c264bc6 WatchSource:0}: Error finding container f0d04f478f2c70eb3a821485d87e70c20c99858634e94c926dc4b3884c264bc6: Status 404 returned error can't find the container with id f0d04f478f2c70eb3a821485d87e70c20c99858634e94c926dc4b3884c264bc6 Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.387944 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.388604 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.390682 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.390974 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.396483 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.531275 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8fb3f8-7871-43cf-8332-712448674d0f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.531332 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8fb3f8-7871-43cf-8332-712448674d0f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.583888 4685 generic.go:334] "Generic (PLEG): container finished" podID="c9eff178-9b35-42e8-a94d-ecb975269482" containerID="75ab0e93a012173aa2efb8fcae02e95fbf25f13ebbc7df3187a9706fad8bc054" exitCode=0 Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.583969 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wff8" event={"ID":"c9eff178-9b35-42e8-a94d-ecb975269482","Type":"ContainerDied","Data":"75ab0e93a012173aa2efb8fcae02e95fbf25f13ebbc7df3187a9706fad8bc054"} Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.584001 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wff8" event={"ID":"c9eff178-9b35-42e8-a94d-ecb975269482","Type":"ContainerStarted","Data":"350e005d6c7e397d23aefb939c8ad596f684859c724bef83435aacc0b6be6f01"} Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.594003 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjjwk" event={"ID":"bcc954ee-61b9-492c-b375-2f271c18201c","Type":"ContainerStarted","Data":"f0d04f478f2c70eb3a821485d87e70c20c99858634e94c926dc4b3884c264bc6"} Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.595714 4685 generic.go:334] "Generic (PLEG): container finished" podID="47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5" containerID="3f139ea44d8121f225c39e6e32a10ca416a01a0a3c5a09cc4f8be490a32606ef" exitCode=0 Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.595784 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5","Type":"ContainerDied","Data":"3f139ea44d8121f225c39e6e32a10ca416a01a0a3c5a09cc4f8be490a32606ef"} Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.597928 4685 generic.go:334] "Generic (PLEG): container finished" podID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" containerID="cae95ba12a90aeec9abc829c8c9e09fcebe5b6081c5169bab39995801b71299a" exitCode=0 Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.597959 
4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7nt8" event={"ID":"907873d4-fae4-4bc1-b7ff-a56b329f47a8","Type":"ContainerDied","Data":"cae95ba12a90aeec9abc829c8c9e09fcebe5b6081c5169bab39995801b71299a"} Jan 29 16:28:46 crc kubenswrapper[4685]: E0129 16:28:46.601213 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.632752 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8fb3f8-7871-43cf-8332-712448674d0f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.633072 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8fb3f8-7871-43cf-8332-712448674d0f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.632869 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8fb3f8-7871-43cf-8332-712448674d0f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.679323 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8fb3f8-7871-43cf-8332-712448674d0f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:46 crc kubenswrapper[4685]: E0129 16:28:46.715916 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:28:46 crc kubenswrapper[4685]: E0129 16:28:46.716063 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ljn4n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9wff8_openshift-marketplace(c9eff178-9b35-42e8-a94d-ecb975269482): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:46 crc kubenswrapper[4685]: E0129 16:28:46.717209 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:28:46 crc kubenswrapper[4685]: E0129 16:28:46.724131 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:28:46 crc kubenswrapper[4685]: E0129 16:28:46.724269 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nl2j7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b7nt8_openshift-marketplace(907873d4-fae4-4bc1-b7ff-a56b329f47a8): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:46 crc kubenswrapper[4685]: E0129 16:28:46.725449 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:28:46 crc kubenswrapper[4685]: I0129 16:28:46.832694 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.033310 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 29 16:28:47 crc kubenswrapper[4685]: W0129 16:28:47.042566 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod9e8fb3f8_7871_43cf_8332_712448674d0f.slice/crio-334e03f6417e68b1d888eef071abc94da149a66dbfbb3f6a843035311adc3da7 WatchSource:0}: Error finding container 334e03f6417e68b1d888eef071abc94da149a66dbfbb3f6a843035311adc3da7: Status 404 returned error can't find the container with id 334e03f6417e68b1d888eef071abc94da149a66dbfbb3f6a843035311adc3da7 Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.118991 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:47 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:47 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:47 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.119049 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.604560 4685 generic.go:334] "Generic (PLEG): container finished" podID="bcc954ee-61b9-492c-b375-2f271c18201c" containerID="dea5c6023c049f1f5d1c6844e3a557f11cc53ba23cf00828b09dad8ec3295077" exitCode=0 Jan 29 16:28:47 crc kubenswrapper[4685]: E0129 16:28:47.613304 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:28:47 crc kubenswrapper[4685]: E0129 16:28:47.613381 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.614594 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjjwk" event={"ID":"bcc954ee-61b9-492c-b375-2f271c18201c","Type":"ContainerDied","Data":"dea5c6023c049f1f5d1c6844e3a557f11cc53ba23cf00828b09dad8ec3295077"} Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.614638 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9e8fb3f8-7871-43cf-8332-712448674d0f","Type":"ContainerStarted","Data":"7c8a617fe43a39007dca64dd63ebdc9530352e3886a8412ba9906e04860a82ca"} Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.614655 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"9e8fb3f8-7871-43cf-8332-712448674d0f","Type":"ContainerStarted","Data":"334e03f6417e68b1d888eef071abc94da149a66dbfbb3f6a843035311adc3da7"} Jan 29 16:28:47 crc kubenswrapper[4685]: I0129 16:28:47.682567 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=1.682542383 podStartE2EDuration="1.682542383s" podCreationTimestamp="2026-01-29 16:28:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:28:47.6606472 +0000 UTC m=+154.858003482" watchObservedRunningTime="2026-01-29 16:28:47.682542383 +0000 UTC m=+154.879898645" Jan 29 16:28:47 crc kubenswrapper[4685]: E0129 16:28:47.763849 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:28:47 crc kubenswrapper[4685]: E0129 16:28:47.764048 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cm2t8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-sjjwk_openshift-marketplace(bcc954ee-61b9-492c-b375-2f271c18201c): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:47 crc kubenswrapper[4685]: E0129 16:28:47.765512 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:28:48 crc 
kubenswrapper[4685]: E0129 16:28:48.615311 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.698305 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:48 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:48 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:48 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.698425 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.790083 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.865015 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kubelet-dir\") pod \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.865165 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5" (UID: "47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.865208 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kube-api-access\") pod \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\" (UID: \"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5\") " Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.865650 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.870061 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5" (UID: "47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:28:48 crc kubenswrapper[4685]: I0129 16:28:48.967372 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.120645 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:49 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:49 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:49 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.120703 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.214649 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.219505 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-hlrxs" Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.621579 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.621596 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5","Type":"ContainerDied","Data":"5df83e3758e1eaa2fe5a1f2b741bfd2f15e6c46c22b346d8562b2b58f17c8655"} Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.621767 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5df83e3758e1eaa2fe5a1f2b741bfd2f15e6c46c22b346d8562b2b58f17c8655" Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.624323 4685 generic.go:334] "Generic (PLEG): container finished" podID="9e8fb3f8-7871-43cf-8332-712448674d0f" containerID="7c8a617fe43a39007dca64dd63ebdc9530352e3886a8412ba9906e04860a82ca" exitCode=0 Jan 29 16:28:49 crc kubenswrapper[4685]: I0129 16:28:49.624521 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9e8fb3f8-7871-43cf-8332-712448674d0f","Type":"ContainerDied","Data":"7c8a617fe43a39007dca64dd63ebdc9530352e3886a8412ba9906e04860a82ca"} Jan 29 16:28:50 crc kubenswrapper[4685]: I0129 16:28:50.118551 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:50 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:50 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:50 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:50 crc kubenswrapper[4685]: I0129 16:28:50.118628 4685 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:50 crc kubenswrapper[4685]: I0129 16:28:50.626099 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-w6clq" Jan 29 16:28:50 crc kubenswrapper[4685]: I0129 16:28:50.901243 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.000168 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8fb3f8-7871-43cf-8332-712448674d0f-kube-api-access\") pod \"9e8fb3f8-7871-43cf-8332-712448674d0f\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.000265 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8fb3f8-7871-43cf-8332-712448674d0f-kubelet-dir\") pod \"9e8fb3f8-7871-43cf-8332-712448674d0f\" (UID: \"9e8fb3f8-7871-43cf-8332-712448674d0f\") " Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.000574 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e8fb3f8-7871-43cf-8332-712448674d0f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9e8fb3f8-7871-43cf-8332-712448674d0f" (UID: "9e8fb3f8-7871-43cf-8332-712448674d0f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.009221 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e8fb3f8-7871-43cf-8332-712448674d0f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9e8fb3f8-7871-43cf-8332-712448674d0f" (UID: "9e8fb3f8-7871-43cf-8332-712448674d0f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.101595 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8fb3f8-7871-43cf-8332-712448674d0f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.101632 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8fb3f8-7871-43cf-8332-712448674d0f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.119018 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:51 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:51 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:51 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.119100 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.637741 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9e8fb3f8-7871-43cf-8332-712448674d0f","Type":"ContainerDied","Data":"334e03f6417e68b1d888eef071abc94da149a66dbfbb3f6a843035311adc3da7"} Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.637794 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="334e03f6417e68b1d888eef071abc94da149a66dbfbb3f6a843035311adc3da7" Jan 29 16:28:51 crc kubenswrapper[4685]: I0129 16:28:51.637857 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 29 16:28:52 crc kubenswrapper[4685]: I0129 16:28:52.119086 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:52 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:52 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:52 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:52 crc kubenswrapper[4685]: I0129 16:28:52.119145 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:53 crc kubenswrapper[4685]: I0129 16:28:53.118896 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:53 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:53 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:53 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:53 crc kubenswrapper[4685]: I0129 16:28:53.119227 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:54 crc kubenswrapper[4685]: I0129 16:28:54.118308 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:54 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:54 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:54 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:54 crc kubenswrapper[4685]: I0129 16:28:54.118617 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:54 crc kubenswrapper[4685]: I0129 16:28:54.668902 4685 patch_prober.go:28] interesting pod/console-f9d7485db-jmf78 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.27:8443/health\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Jan 29 16:28:54 crc kubenswrapper[4685]: I0129 16:28:54.669250 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-jmf78" podUID="6dd08be9-187d-457a-ae06-ead4cb0527c6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.27:8443/health\": dial tcp 10.217.0.27:8443: connect: connection refused" Jan 29 16:28:54 crc kubenswrapper[4685]: I0129 16:28:54.794788 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-hndrh" Jan 29 16:28:55 crc kubenswrapper[4685]: I0129 16:28:55.119123 4685 patch_prober.go:28] interesting 
pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:55 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:55 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:55 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:55 crc kubenswrapper[4685]: I0129 16:28:55.119462 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:56 crc kubenswrapper[4685]: I0129 16:28:56.118710 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:56 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:56 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:56 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:56 crc kubenswrapper[4685]: I0129 16:28:56.119127 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:56 crc kubenswrapper[4685]: E0129 16:28:56.730765 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:28:56 crc kubenswrapper[4685]: E0129 16:28:56.730906 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zxbbn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7x5cm_openshift-marketplace(48e58904-04a7-4e69-91a4-61118022ec0b): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:56 crc kubenswrapper[4685]: E0129 16:28:56.732170 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:28:57 crc kubenswrapper[4685]: I0129 16:28:57.120210 4685 patch_prober.go:28] interesting pod/router-default-5444994796-87lv2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 29 16:28:57 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Jan 29 16:28:57 crc kubenswrapper[4685]: [+]process-running ok Jan 29 16:28:57 crc kubenswrapper[4685]: healthz check failed Jan 29 16:28:57 crc kubenswrapper[4685]: I0129 16:28:57.120275 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-87lv2" podUID="6716355d-d67e-47ab-97b5-3eea8edf4128" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 29 16:28:58 crc kubenswrapper[4685]: I0129 16:28:58.118328 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:58 crc kubenswrapper[4685]: I0129 16:28:58.122053 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-87lv2" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.742516 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting 
bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.742749 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nl2j7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b7nt8_openshift-marketplace(907873d4-fae4-4bc1-b7ff-a56b329f47a8): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.742909 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.743103 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vg486,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-k8cws_openshift-marketplace(13f56e43-a938-4682-9b5a-14fe7af129e5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.743241 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.743450 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sjpfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pk7q7_openshift-marketplace(e14e2172-5fc7-4b65-90fe-4060e571eee5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.744362 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.744425 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:28:58 crc kubenswrapper[4685]: E0129 16:28:58.744568 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:28:59 crc kubenswrapper[4685]: E0129 16:28:59.726887 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:28:59 crc kubenswrapper[4685]: E0129 16:28:59.727375 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkfdq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-8h7mh_openshift-marketplace(955e6277-1f7d-4d59-8987-3618ed745fc5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:59 crc kubenswrapper[4685]: E0129 16:28:59.728509 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:28:59 crc kubenswrapper[4685]: E0129 16:28:59.735145 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:28:59 crc kubenswrapper[4685]: E0129 16:28:59.735278 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2ztg7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ksvjv_openshift-marketplace(b1edc1c3-3a71-4117-bf81-b3e95b8d2230): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:28:59 crc kubenswrapper[4685]: E0129 16:28:59.736571 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:29:00 crc kubenswrapper[4685]: E0129 16:29:00.721433 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:29:00 crc kubenswrapper[4685]: E0129 16:29:00.721575 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ljn4n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9wff8_openshift-marketplace(c9eff178-9b35-42e8-a94d-ecb975269482): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:00 crc kubenswrapper[4685]: E0129 16:29:00.722727 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:29:00 crc kubenswrapper[4685]: I0129 16:29:00.989346 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xgmc2"] Jan 29 16:29:00 crc kubenswrapper[4685]: I0129 16:29:00.989654 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" containerID="cri-o://04b442f720c029c31052248b741cbdb9fe23dd36a902860a3c44f45a51295528" gracePeriod=30 Jan 29 16:29:01 crc kubenswrapper[4685]: I0129 16:29:01.017703 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w"] Jan 29 16:29:01 crc kubenswrapper[4685]: I0129 16:29:01.017937 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" containerName="route-controller-manager" containerID="cri-o://862e29ea49052b4624b778d4ef27ef121979d641ad586135f5216f595dfb9597" gracePeriod=30 Jan 29 16:29:02 crc kubenswrapper[4685]: I0129 16:29:02.723435 4685 generic.go:334] "Generic (PLEG): container finished" podID="802aac97-996b-4fbc-8616-0ec29f5462df" containerID="04b442f720c029c31052248b741cbdb9fe23dd36a902860a3c44f45a51295528" exitCode=0 Jan 29 16:29:02 crc 
kubenswrapper[4685]: I0129 16:29:02.723496 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" event={"ID":"802aac97-996b-4fbc-8616-0ec29f5462df","Type":"ContainerDied","Data":"04b442f720c029c31052248b741cbdb9fe23dd36a902860a3c44f45a51295528"} Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.079762 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.331206 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.331268 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.670710 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.674112 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-jmf78" Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.732923 4685 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xgmc2 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.732980 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.816222 4685 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-94l2w container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Jan 29 16:29:04 crc kubenswrapper[4685]: I0129 16:29:04.816276 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Jan 29 16:29:07 crc kubenswrapper[4685]: E0129 16:29:07.795279 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" 
image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:29:07 crc kubenswrapper[4685]: E0129 16:29:07.795762 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cm2t8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-sjjwk_openshift-marketplace(bcc954ee-61b9-492c-b375-2f271c18201c): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:07 crc kubenswrapper[4685]: E0129 16:29:07.796958 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.270032 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.336726 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-56d9bbf66c-7dx74"] Jan 29 16:29:09 crc kubenswrapper[4685]: E0129 16:29:09.337052 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e8fb3f8-7871-43cf-8332-712448674d0f" containerName="pruner" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.337079 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e8fb3f8-7871-43cf-8332-712448674d0f" containerName="pruner" Jan 29 16:29:09 crc kubenswrapper[4685]: E0129 16:29:09.337103 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5" containerName="pruner" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.337112 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5" containerName="pruner" Jan 29 16:29:09 crc kubenswrapper[4685]: E0129 16:29:09.337125 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.337134 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.337249 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="47d0a4bc-ea5e-49df-b6ef-fdadf0d432b5" containerName="pruner" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.337263 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e8fb3f8-7871-43cf-8332-712448674d0f" containerName="pruner" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.337281 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" containerName="controller-manager" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.337822 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.340920 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56d9bbf66c-7dx74"] Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.372838 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-config\") pod \"802aac97-996b-4fbc-8616-0ec29f5462df\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.372925 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p72j5\" (UniqueName: \"kubernetes.io/projected/802aac97-996b-4fbc-8616-0ec29f5462df-kube-api-access-p72j5\") pod \"802aac97-996b-4fbc-8616-0ec29f5462df\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.373026 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802aac97-996b-4fbc-8616-0ec29f5462df-serving-cert\") pod \"802aac97-996b-4fbc-8616-0ec29f5462df\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.373060 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-client-ca\") pod \"802aac97-996b-4fbc-8616-0ec29f5462df\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.373081 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-proxy-ca-bundles\") pod \"802aac97-996b-4fbc-8616-0ec29f5462df\" (UID: \"802aac97-996b-4fbc-8616-0ec29f5462df\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.374245 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "802aac97-996b-4fbc-8616-0ec29f5462df" (UID: "802aac97-996b-4fbc-8616-0ec29f5462df"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.374967 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-client-ca" (OuterVolumeSpecName: "client-ca") pod "802aac97-996b-4fbc-8616-0ec29f5462df" (UID: "802aac97-996b-4fbc-8616-0ec29f5462df"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.374707 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-config" (OuterVolumeSpecName: "config") pod "802aac97-996b-4fbc-8616-0ec29f5462df" (UID: "802aac97-996b-4fbc-8616-0ec29f5462df"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.384779 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/802aac97-996b-4fbc-8616-0ec29f5462df-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "802aac97-996b-4fbc-8616-0ec29f5462df" (UID: "802aac97-996b-4fbc-8616-0ec29f5462df"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.390301 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/802aac97-996b-4fbc-8616-0ec29f5462df-kube-api-access-p72j5" (OuterVolumeSpecName: "kube-api-access-p72j5") pod "802aac97-996b-4fbc-8616-0ec29f5462df" (UID: "802aac97-996b-4fbc-8616-0ec29f5462df"). InnerVolumeSpecName "kube-api-access-p72j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475051 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-client-ca\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475199 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-config\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475251 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b292a76b-7926-45e3-8d3e-ec4011fd0db5-serving-cert\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475281 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-proxy-ca-bundles\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475384 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94wdg\" (UniqueName: \"kubernetes.io/projected/b292a76b-7926-45e3-8d3e-ec4011fd0db5-kube-api-access-94wdg\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475745 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475787 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475801 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/802aac97-996b-4fbc-8616-0ec29f5462df-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475811 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p72j5\" (UniqueName: \"kubernetes.io/projected/802aac97-996b-4fbc-8616-0ec29f5462df-kube-api-access-p72j5\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.475821 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/802aac97-996b-4fbc-8616-0ec29f5462df-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.577377 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-client-ca\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.577489 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-config\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.577551 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b292a76b-7926-45e3-8d3e-ec4011fd0db5-serving-cert\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.577571 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-proxy-ca-bundles\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.577611 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94wdg\" (UniqueName: \"kubernetes.io/projected/b292a76b-7926-45e3-8d3e-ec4011fd0db5-kube-api-access-94wdg\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.578837 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-config\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.580114 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-proxy-ca-bundles\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.580798 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-client-ca\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.583807 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b292a76b-7926-45e3-8d3e-ec4011fd0db5-serving-cert\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.594705 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94wdg\" (UniqueName: \"kubernetes.io/projected/b292a76b-7926-45e3-8d3e-ec4011fd0db5-kube-api-access-94wdg\") pod \"controller-manager-56d9bbf66c-7dx74\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.660882 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.770099 4685 generic.go:334] "Generic (PLEG): container finished" podID="e2280720-bbcb-438f-b38e-3b24566ce570" containerID="862e29ea49052b4624b778d4ef27ef121979d641ad586135f5216f595dfb9597" exitCode=0 Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.770183 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" event={"ID":"e2280720-bbcb-438f-b38e-3b24566ce570","Type":"ContainerDied","Data":"862e29ea49052b4624b778d4ef27ef121979d641ad586135f5216f595dfb9597"} Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.775090 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" event={"ID":"802aac97-996b-4fbc-8616-0ec29f5462df","Type":"ContainerDied","Data":"0a739e49fca629511080e4c4d3efc18b612c1cc1f18f2c7355d708c8d834c161"} Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.775146 4685 scope.go:117] "RemoveContainer" containerID="04b442f720c029c31052248b741cbdb9fe23dd36a902860a3c44f45a51295528" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.775202 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xgmc2" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.805865 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xgmc2"] Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.814062 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xgmc2"] Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.820774 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.983979 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2280720-bbcb-438f-b38e-3b24566ce570-serving-cert\") pod \"e2280720-bbcb-438f-b38e-3b24566ce570\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.984103 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c24pz\" (UniqueName: \"kubernetes.io/projected/e2280720-bbcb-438f-b38e-3b24566ce570-kube-api-access-c24pz\") pod \"e2280720-bbcb-438f-b38e-3b24566ce570\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.985331 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-config\") pod \"e2280720-bbcb-438f-b38e-3b24566ce570\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.985493 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-client-ca\") pod \"e2280720-bbcb-438f-b38e-3b24566ce570\" (UID: \"e2280720-bbcb-438f-b38e-3b24566ce570\") " Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.986326 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-client-ca" (OuterVolumeSpecName: "client-ca") pod "e2280720-bbcb-438f-b38e-3b24566ce570" (UID: "e2280720-bbcb-438f-b38e-3b24566ce570"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.987109 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-config" (OuterVolumeSpecName: "config") pod "e2280720-bbcb-438f-b38e-3b24566ce570" (UID: "e2280720-bbcb-438f-b38e-3b24566ce570"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.992077 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2280720-bbcb-438f-b38e-3b24566ce570-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e2280720-bbcb-438f-b38e-3b24566ce570" (UID: "e2280720-bbcb-438f-b38e-3b24566ce570"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:09 crc kubenswrapper[4685]: I0129 16:29:09.992426 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2280720-bbcb-438f-b38e-3b24566ce570-kube-api-access-c24pz" (OuterVolumeSpecName: "kube-api-access-c24pz") pod "e2280720-bbcb-438f-b38e-3b24566ce570" (UID: "e2280720-bbcb-438f-b38e-3b24566ce570"). InnerVolumeSpecName "kube-api-access-c24pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.084549 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56d9bbf66c-7dx74"] Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.086937 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.086959 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e2280720-bbcb-438f-b38e-3b24566ce570-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.086968 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e2280720-bbcb-438f-b38e-3b24566ce570-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.086979 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c24pz\" (UniqueName: \"kubernetes.io/projected/e2280720-bbcb-438f-b38e-3b24566ce570-kube-api-access-c24pz\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.785925 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" event={"ID":"b292a76b-7926-45e3-8d3e-ec4011fd0db5","Type":"ContainerStarted","Data":"9414beb07455cf851f0f2878a30f24cbb7ed5f2d4ac6d5ce697754cf7fbac5b3"} Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.786590 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" event={"ID":"b292a76b-7926-45e3-8d3e-ec4011fd0db5","Type":"ContainerStarted","Data":"315249021bf5518de5758b795f0dad7399bff11c1f620fe590243cab5e883477"} Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.786639 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.788171 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" event={"ID":"e2280720-bbcb-438f-b38e-3b24566ce570","Type":"ContainerDied","Data":"1c5c0ef4cd80fa76be12a16cc039a3e720d37f2d1f7587b92b5ad98180b299bc"} Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.788246 4685 scope.go:117] "RemoveContainer" containerID="862e29ea49052b4624b778d4ef27ef121979d641ad586135f5216f595dfb9597" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.788284 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.794989 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.829270 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" podStartSLOduration=9.829246702 podStartE2EDuration="9.829246702s" podCreationTimestamp="2026-01-29 16:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:10.824638237 +0000 UTC m=+178.021994509" watchObservedRunningTime="2026-01-29 16:29:10.829246702 +0000 UTC m=+178.026602994" Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.870863 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w"] Jan 29 16:29:10 crc kubenswrapper[4685]: I0129 16:29:10.875839 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-94l2w"] Jan 29 16:29:11 crc kubenswrapper[4685]: E0129 16:29:11.603356 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:29:11 crc kubenswrapper[4685]: E0129 16:29:11.603356 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.609169 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="802aac97-996b-4fbc-8616-0ec29f5462df" path="/var/lib/kubelet/pods/802aac97-996b-4fbc-8616-0ec29f5462df/volumes" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.609760 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" path="/var/lib/kubelet/pods/e2280720-bbcb-438f-b38e-3b24566ce570/volumes" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.761286 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd"] Jan 29 16:29:11 crc kubenswrapper[4685]: E0129 16:29:11.761554 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" containerName="route-controller-manager" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.761568 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" containerName="route-controller-manager" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.761694 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2280720-bbcb-438f-b38e-3b24566ce570" containerName="route-controller-manager" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.762142 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.766313 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.766344 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.766421 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.766456 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.766466 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.766489 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.786378 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd"] Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.917177 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-config\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.917239 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/787f5e49-1901-4724-803c-05881725790f-serving-cert\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.917297 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-client-ca\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:11 crc kubenswrapper[4685]: I0129 16:29:11.917331 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8plz\" (UniqueName: \"kubernetes.io/projected/787f5e49-1901-4724-803c-05881725790f-kube-api-access-j8plz\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.018949 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/787f5e49-1901-4724-803c-05881725790f-serving-cert\") pod 
\"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.019041 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-client-ca\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.019073 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8plz\" (UniqueName: \"kubernetes.io/projected/787f5e49-1901-4724-803c-05881725790f-kube-api-access-j8plz\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.019148 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-config\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.020369 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-client-ca\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.020497 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-config\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.026597 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/787f5e49-1901-4724-803c-05881725790f-serving-cert\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.041623 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8plz\" (UniqueName: \"kubernetes.io/projected/787f5e49-1901-4724-803c-05881725790f-kube-api-access-j8plz\") pod \"route-controller-manager-d96fffb69-x44pd\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.079502 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.302136 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd"] Jan 29 16:29:12 crc kubenswrapper[4685]: W0129 16:29:12.323179 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod787f5e49_1901_4724_803c_05881725790f.slice/crio-0a78c5393a65b60cdbeff57ca69ea7e9e599eca88916cd83d1a8a1462d044bae WatchSource:0}: Error finding container 0a78c5393a65b60cdbeff57ca69ea7e9e599eca88916cd83d1a8a1462d044bae: Status 404 returned error can't find the container with id 0a78c5393a65b60cdbeff57ca69ea7e9e599eca88916cd83d1a8a1462d044bae Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.828498 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" event={"ID":"787f5e49-1901-4724-803c-05881725790f","Type":"ContainerStarted","Data":"c3c1f50c68b606b99294e86a7d970b9154e53008d997ac67862e0a7e2ce76c17"} Jan 29 16:29:12 crc kubenswrapper[4685]: I0129 16:29:12.829783 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" event={"ID":"787f5e49-1901-4724-803c-05881725790f","Type":"ContainerStarted","Data":"0a78c5393a65b60cdbeff57ca69ea7e9e599eca88916cd83d1a8a1462d044bae"} Jan 29 16:29:13 crc kubenswrapper[4685]: E0129 16:29:13.609310 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:29:13 crc kubenswrapper[4685]: E0129 16:29:13.610750 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:29:13 crc kubenswrapper[4685]: E0129 16:29:13.611300 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:29:13 crc kubenswrapper[4685]: E0129 16:29:13.611345 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:29:13 crc kubenswrapper[4685]: E0129 16:29:13.612825 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:29:13 crc kubenswrapper[4685]: I0129 
16:29:13.833331 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:13 crc kubenswrapper[4685]: I0129 16:29:13.841421 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:13 crc kubenswrapper[4685]: I0129 16:29:13.853360 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" podStartSLOduration=12.853337482 podStartE2EDuration="12.853337482s" podCreationTimestamp="2026-01-29 16:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:13.849715453 +0000 UTC m=+181.047071735" watchObservedRunningTime="2026-01-29 16:29:13.853337482 +0000 UTC m=+181.050693764" Jan 29 16:29:15 crc kubenswrapper[4685]: I0129 16:29:15.175280 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9tdtm" Jan 29 16:29:19 crc kubenswrapper[4685]: E0129 16:29:19.604197 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:29:20 crc kubenswrapper[4685]: I0129 16:29:20.227025 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gn4d6"] Jan 29 16:29:20 crc kubenswrapper[4685]: I0129 16:29:20.945362 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56d9bbf66c-7dx74"] Jan 29 16:29:20 crc kubenswrapper[4685]: I0129 16:29:20.946783 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" podUID="b292a76b-7926-45e3-8d3e-ec4011fd0db5" containerName="controller-manager" containerID="cri-o://9414beb07455cf851f0f2878a30f24cbb7ed5f2d4ac6d5ce697754cf7fbac5b3" gracePeriod=30 Jan 29 16:29:21 crc kubenswrapper[4685]: I0129 16:29:21.010613 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd"] Jan 29 16:29:21 crc kubenswrapper[4685]: I0129 16:29:21.010858 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" podUID="787f5e49-1901-4724-803c-05881725790f" containerName="route-controller-manager" containerID="cri-o://c3c1f50c68b606b99294e86a7d970b9154e53008d997ac67862e0a7e2ce76c17" gracePeriod=30 Jan 29 16:29:21 crc kubenswrapper[4685]: I0129 16:29:21.878591 4685 generic.go:334] "Generic (PLEG): container finished" podID="787f5e49-1901-4724-803c-05881725790f" containerID="c3c1f50c68b606b99294e86a7d970b9154e53008d997ac67862e0a7e2ce76c17" exitCode=0 Jan 29 16:29:21 crc kubenswrapper[4685]: I0129 16:29:21.878667 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" 
event={"ID":"787f5e49-1901-4724-803c-05881725790f","Type":"ContainerDied","Data":"c3c1f50c68b606b99294e86a7d970b9154e53008d997ac67862e0a7e2ce76c17"} Jan 29 16:29:21 crc kubenswrapper[4685]: I0129 16:29:21.880073 4685 generic.go:334] "Generic (PLEG): container finished" podID="b292a76b-7926-45e3-8d3e-ec4011fd0db5" containerID="9414beb07455cf851f0f2878a30f24cbb7ed5f2d4ac6d5ce697754cf7fbac5b3" exitCode=0 Jan 29 16:29:21 crc kubenswrapper[4685]: I0129 16:29:21.880099 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" event={"ID":"b292a76b-7926-45e3-8d3e-ec4011fd0db5","Type":"ContainerDied","Data":"9414beb07455cf851f0f2878a30f24cbb7ed5f2d4ac6d5ce697754cf7fbac5b3"} Jan 29 16:29:21 crc kubenswrapper[4685]: I0129 16:29:21.951530 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 29 16:29:22 crc kubenswrapper[4685]: I0129 16:29:22.081646 4685 patch_prober.go:28] interesting pod/route-controller-manager-d96fffb69-x44pd container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: connect: connection refused" start-of-body= Jan 29 16:29:22 crc kubenswrapper[4685]: I0129 16:29:22.081708 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" podUID="787f5e49-1901-4724-803c-05881725790f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: connect: connection refused" Jan 29 16:29:22 crc kubenswrapper[4685]: E0129 16:29:22.739550 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:29:22 crc kubenswrapper[4685]: E0129 16:29:22.739745 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zxbbn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7x5cm_openshift-marketplace(48e58904-04a7-4e69-91a4-61118022ec0b): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:22 crc kubenswrapper[4685]: E0129 16:29:22.741340 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.049543 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.055798 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.078601 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw"] Jan 29 16:29:23 crc kubenswrapper[4685]: E0129 16:29:23.079817 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b292a76b-7926-45e3-8d3e-ec4011fd0db5" containerName="controller-manager" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.079876 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b292a76b-7926-45e3-8d3e-ec4011fd0db5" containerName="controller-manager" Jan 29 16:29:23 crc kubenswrapper[4685]: E0129 16:29:23.079930 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="787f5e49-1901-4724-803c-05881725790f" containerName="route-controller-manager" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.079940 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="787f5e49-1901-4724-803c-05881725790f" containerName="route-controller-manager" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.080412 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="787f5e49-1901-4724-803c-05881725790f" containerName="route-controller-manager" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.080449 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b292a76b-7926-45e3-8d3e-ec4011fd0db5" containerName="controller-manager" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.081091 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.113578 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw"] Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168281 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-proxy-ca-bundles\") pod \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168341 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-client-ca\") pod \"787f5e49-1901-4724-803c-05881725790f\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168379 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/787f5e49-1901-4724-803c-05881725790f-serving-cert\") pod \"787f5e49-1901-4724-803c-05881725790f\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168427 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-client-ca\") pod \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168475 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-config\") pod \"787f5e49-1901-4724-803c-05881725790f\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168526 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94wdg\" (UniqueName: \"kubernetes.io/projected/b292a76b-7926-45e3-8d3e-ec4011fd0db5-kube-api-access-94wdg\") pod \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168556 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b292a76b-7926-45e3-8d3e-ec4011fd0db5-serving-cert\") pod \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168601 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8plz\" (UniqueName: \"kubernetes.io/projected/787f5e49-1901-4724-803c-05881725790f-kube-api-access-j8plz\") pod \"787f5e49-1901-4724-803c-05881725790f\" (UID: \"787f5e49-1901-4724-803c-05881725790f\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168626 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-config\") pod \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\" (UID: \"b292a76b-7926-45e3-8d3e-ec4011fd0db5\") " Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168834 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-config\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168870 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f608cd3-329a-4583-a82d-33de4ded9065-serving-cert\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168933 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-client-ca\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.168975 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnbv8\" (UniqueName: \"kubernetes.io/projected/7f608cd3-329a-4583-a82d-33de4ded9065-kube-api-access-hnbv8\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.169188 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-client-ca" (OuterVolumeSpecName: "client-ca") pod "787f5e49-1901-4724-803c-05881725790f" (UID: "787f5e49-1901-4724-803c-05881725790f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.170024 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b292a76b-7926-45e3-8d3e-ec4011fd0db5" (UID: "b292a76b-7926-45e3-8d3e-ec4011fd0db5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.170642 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-client-ca" (OuterVolumeSpecName: "client-ca") pod "b292a76b-7926-45e3-8d3e-ec4011fd0db5" (UID: "b292a76b-7926-45e3-8d3e-ec4011fd0db5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.170662 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-config" (OuterVolumeSpecName: "config") pod "b292a76b-7926-45e3-8d3e-ec4011fd0db5" (UID: "b292a76b-7926-45e3-8d3e-ec4011fd0db5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.171222 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-config" (OuterVolumeSpecName: "config") pod "787f5e49-1901-4724-803c-05881725790f" (UID: "787f5e49-1901-4724-803c-05881725790f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.174142 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/787f5e49-1901-4724-803c-05881725790f-kube-api-access-j8plz" (OuterVolumeSpecName: "kube-api-access-j8plz") pod "787f5e49-1901-4724-803c-05881725790f" (UID: "787f5e49-1901-4724-803c-05881725790f"). InnerVolumeSpecName "kube-api-access-j8plz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.174380 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787f5e49-1901-4724-803c-05881725790f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "787f5e49-1901-4724-803c-05881725790f" (UID: "787f5e49-1901-4724-803c-05881725790f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.174506 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b292a76b-7926-45e3-8d3e-ec4011fd0db5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b292a76b-7926-45e3-8d3e-ec4011fd0db5" (UID: "b292a76b-7926-45e3-8d3e-ec4011fd0db5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.199178 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b292a76b-7926-45e3-8d3e-ec4011fd0db5-kube-api-access-94wdg" (OuterVolumeSpecName: "kube-api-access-94wdg") pod "b292a76b-7926-45e3-8d3e-ec4011fd0db5" (UID: "b292a76b-7926-45e3-8d3e-ec4011fd0db5"). InnerVolumeSpecName "kube-api-access-94wdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.269812 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-client-ca\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.269885 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnbv8\" (UniqueName: \"kubernetes.io/projected/7f608cd3-329a-4583-a82d-33de4ded9065-kube-api-access-hnbv8\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.269933 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-config\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.269967 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f608cd3-329a-4583-a82d-33de4ded9065-serving-cert\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270039 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270053 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270063 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270077 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/787f5e49-1901-4724-803c-05881725790f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270087 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b292a76b-7926-45e3-8d3e-ec4011fd0db5-client-ca\") on node \"crc\" DevicePath \"\"" Jan 
29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270096 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/787f5e49-1901-4724-803c-05881725790f-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270107 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94wdg\" (UniqueName: \"kubernetes.io/projected/b292a76b-7926-45e3-8d3e-ec4011fd0db5-kube-api-access-94wdg\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270117 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b292a76b-7926-45e3-8d3e-ec4011fd0db5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270127 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8plz\" (UniqueName: \"kubernetes.io/projected/787f5e49-1901-4724-803c-05881725790f-kube-api-access-j8plz\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.270843 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-client-ca\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.271222 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-config\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.273631 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f608cd3-329a-4583-a82d-33de4ded9065-serving-cert\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.285879 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnbv8\" (UniqueName: \"kubernetes.io/projected/7f608cd3-329a-4583-a82d-33de4ded9065-kube-api-access-hnbv8\") pod \"route-controller-manager-6d765b47d-h2jpw\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.381772 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.382869 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.385166 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.385512 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.387774 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.421123 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.472851 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7db9432-634b-4052-938e-f7c25d2c981d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.473267 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7db9432-634b-4052-938e-f7c25d2c981d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.574779 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7db9432-634b-4052-938e-f7c25d2c981d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.575092 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7db9432-634b-4052-938e-f7c25d2c981d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.574946 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7db9432-634b-4052-938e-f7c25d2c981d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.592104 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7db9432-634b-4052-938e-f7c25d2c981d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.706219 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.801594 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw"] Jan 29 16:29:23 crc kubenswrapper[4685]: W0129 16:29:23.804031 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f608cd3_329a_4583_a82d_33de4ded9065.slice/crio-66e503168d5e7b0061a4416ed2ca585d4a1a016b7c585f3ff81dce86d80413ce WatchSource:0}: Error finding container 66e503168d5e7b0061a4416ed2ca585d4a1a016b7c585f3ff81dce86d80413ce: Status 404 returned error can't find the container with id 66e503168d5e7b0061a4416ed2ca585d4a1a016b7c585f3ff81dce86d80413ce Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.901603 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" event={"ID":"7f608cd3-329a-4583-a82d-33de4ded9065","Type":"ContainerStarted","Data":"66e503168d5e7b0061a4416ed2ca585d4a1a016b7c585f3ff81dce86d80413ce"} Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.903228 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" event={"ID":"787f5e49-1901-4724-803c-05881725790f","Type":"ContainerDied","Data":"0a78c5393a65b60cdbeff57ca69ea7e9e599eca88916cd83d1a8a1462d044bae"} Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.903281 4685 scope.go:117] "RemoveContainer" containerID="c3c1f50c68b606b99294e86a7d970b9154e53008d997ac67862e0a7e2ce76c17" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.903304 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.909421 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" event={"ID":"b292a76b-7926-45e3-8d3e-ec4011fd0db5","Type":"ContainerDied","Data":"315249021bf5518de5758b795f0dad7399bff11c1f620fe590243cab5e883477"} Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.909518 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56d9bbf66c-7dx74" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.924571 4685 scope.go:117] "RemoveContainer" containerID="9414beb07455cf851f0f2878a30f24cbb7ed5f2d4ac6d5ce697754cf7fbac5b3" Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.934689 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd"] Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.942113 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d96fffb69-x44pd"] Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.946189 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56d9bbf66c-7dx74"] Jan 29 16:29:23 crc kubenswrapper[4685]: I0129 16:29:23.949588 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-56d9bbf66c-7dx74"] Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.099811 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 29 16:29:24 crc kubenswrapper[4685]: E0129 16:29:24.724013 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:29:24 crc kubenswrapper[4685]: E0129 16:29:24.724492 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkfdq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-8h7mh_openshift-marketplace(955e6277-1f7d-4d59-8987-3618ed745fc5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 
(Forbidden)" logger="UnhandledError" Jan 29 16:29:24 crc kubenswrapper[4685]: E0129 16:29:24.725730 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:29:24 crc kubenswrapper[4685]: E0129 16:29:24.729594 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:29:24 crc kubenswrapper[4685]: E0129 16:29:24.729718 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nl2j7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b7nt8_openshift-marketplace(907873d4-fae4-4bc1-b7ff-a56b329f47a8): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:24 crc kubenswrapper[4685]: E0129 16:29:24.730912 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.915938 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" 
event={"ID":"7f608cd3-329a-4583-a82d-33de4ded9065","Type":"ContainerStarted","Data":"82b8428c35b811c1293773e8c40bfada8e8456216f39de13f4bdaaa64958d92c"} Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.916189 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.920112 4685 generic.go:334] "Generic (PLEG): container finished" podID="a7db9432-634b-4052-938e-f7c25d2c981d" containerID="0c83de67cbf91a9152cc8e3c4e45129939b483c8ae8d6e3b79474face51a5cc3" exitCode=0 Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.920147 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a7db9432-634b-4052-938e-f7c25d2c981d","Type":"ContainerDied","Data":"0c83de67cbf91a9152cc8e3c4e45129939b483c8ae8d6e3b79474face51a5cc3"} Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.920182 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a7db9432-634b-4052-938e-f7c25d2c981d","Type":"ContainerStarted","Data":"f39d4d3633ed2429d29c1ceb5157eb8137f54404101a38a99dee03262c4359a6"} Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.924296 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:24 crc kubenswrapper[4685]: I0129 16:29:24.937843 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" podStartSLOduration=3.93782681 podStartE2EDuration="3.93782681s" podCreationTimestamp="2026-01-29 16:29:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:24.934812638 +0000 UTC m=+192.132168940" watchObservedRunningTime="2026-01-29 16:29:24.93782681 +0000 UTC m=+192.135183072" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.612293 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="787f5e49-1901-4724-803c-05881725790f" path="/var/lib/kubelet/pods/787f5e49-1901-4724-803c-05881725790f/volumes" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.614092 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b292a76b-7926-45e3-8d3e-ec4011fd0db5" path="/var/lib/kubelet/pods/b292a76b-7926-45e3-8d3e-ec4011fd0db5/volumes" Jan 29 16:29:25 crc kubenswrapper[4685]: E0129 16:29:25.728524 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:29:25 crc kubenswrapper[4685]: E0129 16:29:25.728694 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ljn4n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9wff8_openshift-marketplace(c9eff178-9b35-42e8-a94d-ecb975269482): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:25 crc kubenswrapper[4685]: E0129 16:29:25.730836 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:29:25 crc kubenswrapper[4685]: E0129 16:29:25.731917 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:29:25 crc kubenswrapper[4685]: E0129 16:29:25.732116 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2ztg7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ksvjv_openshift-marketplace(b1edc1c3-3a71-4117-bf81-b3e95b8d2230): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:25 crc kubenswrapper[4685]: E0129 16:29:25.734506 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.766609 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7cfbf76d79-svr5p"] Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.767731 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.769840 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.770055 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.770139 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.770267 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.769856 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.769915 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.779740 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cfbf76d79-svr5p"] Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.781532 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.902714 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-proxy-ca-bundles\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.903049 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-config\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.903170 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dwvk\" (UniqueName: \"kubernetes.io/projected/d78fc4e0-1dbd-4802-8115-1c4452053735-kube-api-access-8dwvk\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.903320 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-client-ca\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:25 crc kubenswrapper[4685]: I0129 16:29:25.903456 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d78fc4e0-1dbd-4802-8115-1c4452053735-serving-cert\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.004506 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-proxy-ca-bundles\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.004545 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-config\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.004593 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dwvk\" (UniqueName: \"kubernetes.io/projected/d78fc4e0-1dbd-4802-8115-1c4452053735-kube-api-access-8dwvk\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.004681 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-client-ca\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.004741 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d78fc4e0-1dbd-4802-8115-1c4452053735-serving-cert\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.005776 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-proxy-ca-bundles\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.006084 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-client-ca\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.006258 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-config\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " 
pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.014642 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d78fc4e0-1dbd-4802-8115-1c4452053735-serving-cert\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.024162 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dwvk\" (UniqueName: \"kubernetes.io/projected/d78fc4e0-1dbd-4802-8115-1c4452053735-kube-api-access-8dwvk\") pod \"controller-manager-7cfbf76d79-svr5p\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.088153 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.154654 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.313966 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7db9432-634b-4052-938e-f7c25d2c981d-kubelet-dir\") pod \"a7db9432-634b-4052-938e-f7c25d2c981d\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.314020 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7db9432-634b-4052-938e-f7c25d2c981d-kube-api-access\") pod \"a7db9432-634b-4052-938e-f7c25d2c981d\" (UID: \"a7db9432-634b-4052-938e-f7c25d2c981d\") " Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.314550 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7db9432-634b-4052-938e-f7c25d2c981d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a7db9432-634b-4052-938e-f7c25d2c981d" (UID: "a7db9432-634b-4052-938e-f7c25d2c981d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.318826 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7db9432-634b-4052-938e-f7c25d2c981d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a7db9432-634b-4052-938e-f7c25d2c981d" (UID: "a7db9432-634b-4052-938e-f7c25d2c981d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.415548 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7db9432-634b-4052-938e-f7c25d2c981d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.415579 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7db9432-634b-4052-938e-f7c25d2c981d-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.522916 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cfbf76d79-svr5p"] Jan 29 16:29:26 crc kubenswrapper[4685]: E0129 16:29:26.747909 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:29:26 crc kubenswrapper[4685]: E0129 16:29:26.748444 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sjpfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pk7q7_openshift-marketplace(e14e2172-5fc7-4b65-90fe-4060e571eee5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:26 crc kubenswrapper[4685]: E0129 16:29:26.749571 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-pk7q7" 
podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.932531 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"a7db9432-634b-4052-938e-f7c25d2c981d","Type":"ContainerDied","Data":"f39d4d3633ed2429d29c1ceb5157eb8137f54404101a38a99dee03262c4359a6"} Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.932571 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f39d4d3633ed2429d29c1ceb5157eb8137f54404101a38a99dee03262c4359a6" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.932619 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.938061 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" event={"ID":"d78fc4e0-1dbd-4802-8115-1c4452053735","Type":"ContainerStarted","Data":"cca7cb7b4d074c80b3a2ddba87da833f9e6047f0f1559ad72049ce97f59edb76"} Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.938126 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" event={"ID":"d78fc4e0-1dbd-4802-8115-1c4452053735","Type":"ContainerStarted","Data":"50285a1092501d54c8cbbfd1552963d9dfc8aff41be81d9a47301a37da7c1bca"} Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.938287 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.947440 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:26 crc kubenswrapper[4685]: I0129 16:29:26.957480 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" podStartSLOduration=6.957460363 podStartE2EDuration="6.957460363s" podCreationTimestamp="2026-01-29 16:29:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:26.954747959 +0000 UTC m=+194.152104241" watchObservedRunningTime="2026-01-29 16:29:26.957460363 +0000 UTC m=+194.154816625" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.380604 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 29 16:29:28 crc kubenswrapper[4685]: E0129 16:29:28.381049 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7db9432-634b-4052-938e-f7c25d2c981d" containerName="pruner" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.381062 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7db9432-634b-4052-938e-f7c25d2c981d" containerName="pruner" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.381163 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7db9432-634b-4052-938e-f7c25d2c981d" containerName="pruner" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.381528 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.383849 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.384022 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.391016 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.546747 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kube-api-access\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.547091 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-var-lock\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.547209 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.648682 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.648779 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kube-api-access\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.648872 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-var-lock\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.648889 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.648967 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-var-lock\") pod \"installer-9-crc\" (UID: 
\"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.675515 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kube-api-access\") pod \"installer-9-crc\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: I0129 16:29:28.708848 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:29:28 crc kubenswrapper[4685]: E0129 16:29:28.788321 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:29:28 crc kubenswrapper[4685]: E0129 16:29:28.788690 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vg486,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-k8cws_openshift-marketplace(13f56e43-a938-4682-9b5a-14fe7af129e5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:28 crc kubenswrapper[4685]: E0129 16:29:28.793470 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:29:29 crc kubenswrapper[4685]: I0129 16:29:29.081692 4685 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 29 16:29:29 crc kubenswrapper[4685]: W0129 16:29:29.088809 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod4b2572ad_c614_42ee_88c2_6e8670ba17f8.slice/crio-079ac309498ab23350d57edb0fd2e03b0de4e683866f02dddc62575846703737 WatchSource:0}: Error finding container 079ac309498ab23350d57edb0fd2e03b0de4e683866f02dddc62575846703737: Status 404 returned error can't find the container with id 079ac309498ab23350d57edb0fd2e03b0de4e683866f02dddc62575846703737 Jan 29 16:29:29 crc kubenswrapper[4685]: I0129 16:29:29.974642 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b2572ad-c614-42ee-88c2-6e8670ba17f8","Type":"ContainerStarted","Data":"0706e6ec01f763a1f2ea147749ba8bb951a4d691b25108d0566edb083a7b8b2e"} Jan 29 16:29:29 crc kubenswrapper[4685]: I0129 16:29:29.975181 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b2572ad-c614-42ee-88c2-6e8670ba17f8","Type":"ContainerStarted","Data":"079ac309498ab23350d57edb0fd2e03b0de4e683866f02dddc62575846703737"} Jan 29 16:29:29 crc kubenswrapper[4685]: I0129 16:29:29.995364 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.995340881 podStartE2EDuration="1.995340881s" podCreationTimestamp="2026-01-29 16:29:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:29.988411014 +0000 UTC m=+197.185767296" watchObservedRunningTime="2026-01-29 16:29:29.995340881 +0000 UTC m=+197.192697143" Jan 29 16:29:30 crc kubenswrapper[4685]: E0129 16:29:30.725223 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:29:30 crc kubenswrapper[4685]: E0129 16:29:30.725451 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cm2t8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-sjjwk_openshift-marketplace(bcc954ee-61b9-492c-b375-2f271c18201c): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:29:30 crc kubenswrapper[4685]: E0129 16:29:30.726733 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:29:34 crc kubenswrapper[4685]: I0129 16:29:34.330760 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:29:34 crc kubenswrapper[4685]: I0129 16:29:34.330908 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:29:34 crc kubenswrapper[4685]: I0129 16:29:34.330958 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:29:34 crc kubenswrapper[4685]: I0129 16:29:34.331464 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:29:34 crc kubenswrapper[4685]: I0129 16:29:34.331549 4685 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de" gracePeriod=600 Jan 29 16:29:35 crc kubenswrapper[4685]: I0129 16:29:35.003499 4685 generic.go:334] "Generic (PLEG): container finished" podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de" exitCode=0 Jan 29 16:29:35 crc kubenswrapper[4685]: I0129 16:29:35.003575 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de"} Jan 29 16:29:35 crc kubenswrapper[4685]: I0129 16:29:35.004056 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"e7de6ec8d57c9bb74f79c5a4e9fa8e09deaf5359c715811ce05763f4d27b77e6"} Jan 29 16:29:35 crc kubenswrapper[4685]: E0129 16:29:35.604120 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:29:36 crc kubenswrapper[4685]: E0129 16:29:36.603572 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:29:36 crc kubenswrapper[4685]: E0129 16:29:36.603629 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:29:39 crc kubenswrapper[4685]: E0129 16:29:39.604838 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:29:40 crc kubenswrapper[4685]: E0129 16:29:40.603841 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:29:40 crc kubenswrapper[4685]: I0129 16:29:40.978424 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cfbf76d79-svr5p"] Jan 29 16:29:40 crc kubenswrapper[4685]: I0129 16:29:40.978706 4685 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" podUID="d78fc4e0-1dbd-4802-8115-1c4452053735" containerName="controller-manager" containerID="cri-o://cca7cb7b4d074c80b3a2ddba87da833f9e6047f0f1559ad72049ce97f59edb76" gracePeriod=30 Jan 29 16:29:40 crc kubenswrapper[4685]: I0129 16:29:40.997677 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw"] Jan 29 16:29:40 crc kubenswrapper[4685]: I0129 16:29:40.997893 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" podUID="7f608cd3-329a-4583-a82d-33de4ded9065" containerName="route-controller-manager" containerID="cri-o://82b8428c35b811c1293773e8c40bfada8e8456216f39de13f4bdaaa64958d92c" gracePeriod=30 Jan 29 16:29:41 crc kubenswrapper[4685]: E0129 16:29:41.604320 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.041440 4685 generic.go:334] "Generic (PLEG): container finished" podID="d78fc4e0-1dbd-4802-8115-1c4452053735" containerID="cca7cb7b4d074c80b3a2ddba87da833f9e6047f0f1559ad72049ce97f59edb76" exitCode=0 Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.041517 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" event={"ID":"d78fc4e0-1dbd-4802-8115-1c4452053735","Type":"ContainerDied","Data":"cca7cb7b4d074c80b3a2ddba87da833f9e6047f0f1559ad72049ce97f59edb76"} Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.043825 4685 generic.go:334] "Generic (PLEG): container finished" podID="7f608cd3-329a-4583-a82d-33de4ded9065" containerID="82b8428c35b811c1293773e8c40bfada8e8456216f39de13f4bdaaa64958d92c" exitCode=0 Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.043866 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" event={"ID":"7f608cd3-329a-4583-a82d-33de4ded9065","Type":"ContainerDied","Data":"82b8428c35b811c1293773e8c40bfada8e8456216f39de13f4bdaaa64958d92c"} Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.080786 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.238157 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-config\") pod \"7f608cd3-329a-4583-a82d-33de4ded9065\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.238301 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnbv8\" (UniqueName: \"kubernetes.io/projected/7f608cd3-329a-4583-a82d-33de4ded9065-kube-api-access-hnbv8\") pod \"7f608cd3-329a-4583-a82d-33de4ded9065\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.238344 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-client-ca\") pod \"7f608cd3-329a-4583-a82d-33de4ded9065\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.238439 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f608cd3-329a-4583-a82d-33de4ded9065-serving-cert\") pod \"7f608cd3-329a-4583-a82d-33de4ded9065\" (UID: \"7f608cd3-329a-4583-a82d-33de4ded9065\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.240189 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-client-ca" (OuterVolumeSpecName: "client-ca") pod "7f608cd3-329a-4583-a82d-33de4ded9065" (UID: "7f608cd3-329a-4583-a82d-33de4ded9065"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.240292 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-config" (OuterVolumeSpecName: "config") pod "7f608cd3-329a-4583-a82d-33de4ded9065" (UID: "7f608cd3-329a-4583-a82d-33de4ded9065"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.243892 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f608cd3-329a-4583-a82d-33de4ded9065-kube-api-access-hnbv8" (OuterVolumeSpecName: "kube-api-access-hnbv8") pod "7f608cd3-329a-4583-a82d-33de4ded9065" (UID: "7f608cd3-329a-4583-a82d-33de4ded9065"). InnerVolumeSpecName "kube-api-access-hnbv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.244079 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f608cd3-329a-4583-a82d-33de4ded9065-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7f608cd3-329a-4583-a82d-33de4ded9065" (UID: "7f608cd3-329a-4583-a82d-33de4ded9065"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.246118 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.339859 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d78fc4e0-1dbd-4802-8115-1c4452053735-serving-cert\") pod \"d78fc4e0-1dbd-4802-8115-1c4452053735\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.339976 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dwvk\" (UniqueName: \"kubernetes.io/projected/d78fc4e0-1dbd-4802-8115-1c4452053735-kube-api-access-8dwvk\") pod \"d78fc4e0-1dbd-4802-8115-1c4452053735\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.340084 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-client-ca\") pod \"d78fc4e0-1dbd-4802-8115-1c4452053735\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.340105 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-proxy-ca-bundles\") pod \"d78fc4e0-1dbd-4802-8115-1c4452053735\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.340126 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-config\") pod \"d78fc4e0-1dbd-4802-8115-1c4452053735\" (UID: \"d78fc4e0-1dbd-4802-8115-1c4452053735\") " Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.340388 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnbv8\" (UniqueName: \"kubernetes.io/projected/7f608cd3-329a-4583-a82d-33de4ded9065-kube-api-access-hnbv8\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.340439 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.340452 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f608cd3-329a-4583-a82d-33de4ded9065-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.340464 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f608cd3-329a-4583-a82d-33de4ded9065-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.341055 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d78fc4e0-1dbd-4802-8115-1c4452053735" (UID: "d78fc4e0-1dbd-4802-8115-1c4452053735"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.341071 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-client-ca" (OuterVolumeSpecName: "client-ca") pod "d78fc4e0-1dbd-4802-8115-1c4452053735" (UID: "d78fc4e0-1dbd-4802-8115-1c4452053735"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.341200 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-config" (OuterVolumeSpecName: "config") pod "d78fc4e0-1dbd-4802-8115-1c4452053735" (UID: "d78fc4e0-1dbd-4802-8115-1c4452053735"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.343534 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78fc4e0-1dbd-4802-8115-1c4452053735-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d78fc4e0-1dbd-4802-8115-1c4452053735" (UID: "d78fc4e0-1dbd-4802-8115-1c4452053735"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.343922 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d78fc4e0-1dbd-4802-8115-1c4452053735-kube-api-access-8dwvk" (OuterVolumeSpecName: "kube-api-access-8dwvk") pod "d78fc4e0-1dbd-4802-8115-1c4452053735" (UID: "d78fc4e0-1dbd-4802-8115-1c4452053735"). InnerVolumeSpecName "kube-api-access-8dwvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.441122 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dwvk\" (UniqueName: \"kubernetes.io/projected/d78fc4e0-1dbd-4802-8115-1c4452053735-kube-api-access-8dwvk\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.441159 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.441171 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.441183 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d78fc4e0-1dbd-4802-8115-1c4452053735-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.441193 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d78fc4e0-1dbd-4802-8115-1c4452053735-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:42 crc kubenswrapper[4685]: E0129 16:29:42.603983 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 
16:29:42.777781 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s"] Jan 29 16:29:42 crc kubenswrapper[4685]: E0129 16:29:42.778342 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d78fc4e0-1dbd-4802-8115-1c4452053735" containerName="controller-manager" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.778366 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d78fc4e0-1dbd-4802-8115-1c4452053735" containerName="controller-manager" Jan 29 16:29:42 crc kubenswrapper[4685]: E0129 16:29:42.778419 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f608cd3-329a-4583-a82d-33de4ded9065" containerName="route-controller-manager" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.778429 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f608cd3-329a-4583-a82d-33de4ded9065" containerName="route-controller-manager" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.778627 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f608cd3-329a-4583-a82d-33de4ded9065" containerName="route-controller-manager" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.778655 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d78fc4e0-1dbd-4802-8115-1c4452053735" containerName="controller-manager" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.779264 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.788483 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s"] Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.947045 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-client-ca\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.947108 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgls8\" (UniqueName: \"kubernetes.io/projected/846be245-527f-471e-92b8-8c3767283a38-kube-api-access-pgls8\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.947150 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846be245-527f-471e-92b8-8c3767283a38-serving-cert\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:42 crc kubenswrapper[4685]: I0129 16:29:42.947175 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-config\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " 
pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.048555 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-client-ca\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.049685 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-client-ca\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.050770 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgls8\" (UniqueName: \"kubernetes.io/projected/846be245-527f-471e-92b8-8c3767283a38-kube-api-access-pgls8\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.050986 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846be245-527f-471e-92b8-8c3767283a38-serving-cert\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.051242 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-config\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.051628 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.051617 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cfbf76d79-svr5p" event={"ID":"d78fc4e0-1dbd-4802-8115-1c4452053735","Type":"ContainerDied","Data":"50285a1092501d54c8cbbfd1552963d9dfc8aff41be81d9a47301a37da7c1bca"} Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.051786 4685 scope.go:117] "RemoveContainer" containerID="cca7cb7b4d074c80b3a2ddba87da833f9e6047f0f1559ad72049ce97f59edb76" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.052280 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-config\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.053247 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" event={"ID":"7f608cd3-329a-4583-a82d-33de4ded9065","Type":"ContainerDied","Data":"66e503168d5e7b0061a4416ed2ca585d4a1a016b7c585f3ff81dce86d80413ce"} Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.053280 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.055492 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846be245-527f-471e-92b8-8c3767283a38-serving-cert\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.066243 4685 scope.go:117] "RemoveContainer" containerID="82b8428c35b811c1293773e8c40bfada8e8456216f39de13f4bdaaa64958d92c" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.084571 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgls8\" (UniqueName: \"kubernetes.io/projected/846be245-527f-471e-92b8-8c3767283a38-kube-api-access-pgls8\") pod \"route-controller-manager-5cb4f98fc6-4p85s\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.085464 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cfbf76d79-svr5p"] Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.087696 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7cfbf76d79-svr5p"] Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.091918 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw"] Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.095553 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d765b47d-h2jpw"] Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.098123 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.490213 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s"] Jan 29 16:29:43 crc kubenswrapper[4685]: W0129 16:29:43.495446 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod846be245_527f_471e_92b8_8c3767283a38.slice/crio-bef0eec3f180d6701ffa4676f9a73cbdcac8438d442fe6aafd78a2e6d68d930d WatchSource:0}: Error finding container bef0eec3f180d6701ffa4676f9a73cbdcac8438d442fe6aafd78a2e6d68d930d: Status 404 returned error can't find the container with id bef0eec3f180d6701ffa4676f9a73cbdcac8438d442fe6aafd78a2e6d68d930d Jan 29 16:29:43 crc kubenswrapper[4685]: E0129 16:29:43.605955 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.608083 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f608cd3-329a-4583-a82d-33de4ded9065" path="/var/lib/kubelet/pods/7f608cd3-329a-4583-a82d-33de4ded9065/volumes" Jan 29 16:29:43 crc kubenswrapper[4685]: I0129 16:29:43.609691 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d78fc4e0-1dbd-4802-8115-1c4452053735" path="/var/lib/kubelet/pods/d78fc4e0-1dbd-4802-8115-1c4452053735/volumes" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.064277 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" event={"ID":"846be245-527f-471e-92b8-8c3767283a38","Type":"ContainerStarted","Data":"3f093cb5458d8f944fbde838fd564916cef2d8b58df41c1e936e16852c264873"} Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.064606 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" event={"ID":"846be245-527f-471e-92b8-8c3767283a38","Type":"ContainerStarted","Data":"bef0eec3f180d6701ffa4676f9a73cbdcac8438d442fe6aafd78a2e6d68d930d"} Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.065106 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.093117 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" podStartSLOduration=3.093095862 podStartE2EDuration="3.093095862s" podCreationTimestamp="2026-01-29 16:29:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:44.086684204 +0000 UTC m=+211.284040476" watchObservedRunningTime="2026-01-29 16:29:44.093095862 +0000 UTC m=+211.290452124" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.123233 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.784452 
4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-549fb6dd65-26wgw"] Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.785907 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.789725 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.790889 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.790889 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.791419 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.793736 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.795892 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.799469 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-549fb6dd65-26wgw"] Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.799545 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.874497 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-proxy-ca-bundles\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.874556 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-client-ca\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.874578 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-config\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.874646 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-serving-cert\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" 
Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.874685 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhqws\" (UniqueName: \"kubernetes.io/projected/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-kube-api-access-qhqws\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.976015 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhqws\" (UniqueName: \"kubernetes.io/projected/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-kube-api-access-qhqws\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.976118 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-proxy-ca-bundles\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.976195 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-client-ca\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.976231 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-config\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.976303 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-serving-cert\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.977803 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-proxy-ca-bundles\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.979762 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-config\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.987539 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-serving-cert\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:44 crc kubenswrapper[4685]: I0129 16:29:44.994580 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-client-ca\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:45 crc kubenswrapper[4685]: I0129 16:29:45.003807 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhqws\" (UniqueName: \"kubernetes.io/projected/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-kube-api-access-qhqws\") pod \"controller-manager-549fb6dd65-26wgw\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:45 crc kubenswrapper[4685]: I0129 16:29:45.118173 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:45 crc kubenswrapper[4685]: I0129 16:29:45.265565 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" podUID="23ad2357-54ad-4029-8391-285a005ce5bf" containerName="oauth-openshift" containerID="cri-o://e8c6ddf284546a29d04b9dee272e8926ad681bbc28968208d23d5a5bfe1808ce" gracePeriod=15 Jan 29 16:29:45 crc kubenswrapper[4685]: E0129 16:29:45.533187 4685 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23ad2357_54ad_4029_8391_285a005ce5bf.slice/crio-conmon-e8c6ddf284546a29d04b9dee272e8926ad681bbc28968208d23d5a5bfe1808ce.scope\": RecentStats: unable to find data in memory cache]" Jan 29 16:29:45 crc kubenswrapper[4685]: I0129 16:29:45.534408 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-549fb6dd65-26wgw"] Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.093466 4685 generic.go:334] "Generic (PLEG): container finished" podID="23ad2357-54ad-4029-8391-285a005ce5bf" containerID="e8c6ddf284546a29d04b9dee272e8926ad681bbc28968208d23d5a5bfe1808ce" exitCode=0 Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.093533 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" event={"ID":"23ad2357-54ad-4029-8391-285a005ce5bf","Type":"ContainerDied","Data":"e8c6ddf284546a29d04b9dee272e8926ad681bbc28968208d23d5a5bfe1808ce"} Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.095359 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" event={"ID":"cc54e59d-23eb-42f6-aa84-ebf20c711f2c","Type":"ContainerStarted","Data":"65cf25947ac77232791ed80b126e2916dc9844ebdd5dd70bfa1bae730cb7c9ca"} Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.095446 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" event={"ID":"cc54e59d-23eb-42f6-aa84-ebf20c711f2c","Type":"ContainerStarted","Data":"db6733569fe0cb3348ca02beeff32a1b2518ffdb689bcb740f6daa8cb2bae3b0"} Jan 29 
16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.095477 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.110950 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.118364 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" podStartSLOduration=6.118342017 podStartE2EDuration="6.118342017s" podCreationTimestamp="2026-01-29 16:29:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:46.112249181 +0000 UTC m=+213.309605463" watchObservedRunningTime="2026-01-29 16:29:46.118342017 +0000 UTC m=+213.315698289" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.303769 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393402 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-cliconfig\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393443 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-service-ca\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393465 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-session\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393499 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v28j\" (UniqueName: \"kubernetes.io/projected/23ad2357-54ad-4029-8391-285a005ce5bf-kube-api-access-4v28j\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393525 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-trusted-ca-bundle\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393561 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-error\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 
16:29:46.393577 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-idp-0-file-data\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393599 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-audit-policies\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393642 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-serving-cert\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393668 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-ocp-branding-template\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393694 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-provider-selection\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393720 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-login\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393751 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23ad2357-54ad-4029-8391-285a005ce5bf-audit-dir\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.393767 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-router-certs\") pod \"23ad2357-54ad-4029-8391-285a005ce5bf\" (UID: \"23ad2357-54ad-4029-8391-285a005ce5bf\") " Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.394293 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.394415 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.394465 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23ad2357-54ad-4029-8391-285a005ce5bf-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.394497 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.394989 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.399556 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.399868 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.400002 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23ad2357-54ad-4029-8391-285a005ce5bf-kube-api-access-4v28j" (OuterVolumeSpecName: "kube-api-access-4v28j") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "kube-api-access-4v28j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.401073 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.401137 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.401243 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.401586 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.401678 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.402286 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "23ad2357-54ad-4029-8391-285a005ce5bf" (UID: "23ad2357-54ad-4029-8391-285a005ce5bf"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495023 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495067 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495084 4685 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495097 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495108 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495117 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495126 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495135 4685 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23ad2357-54ad-4029-8391-285a005ce5bf-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495142 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495152 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495160 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495168 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495176 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v28j\" (UniqueName: \"kubernetes.io/projected/23ad2357-54ad-4029-8391-285a005ce5bf-kube-api-access-4v28j\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:46 crc kubenswrapper[4685]: I0129 16:29:46.495186 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ad2357-54ad-4029-8391-285a005ce5bf-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:29:47 crc kubenswrapper[4685]: I0129 16:29:47.105038 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" Jan 29 16:29:47 crc kubenswrapper[4685]: I0129 16:29:47.114946 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gn4d6" event={"ID":"23ad2357-54ad-4029-8391-285a005ce5bf","Type":"ContainerDied","Data":"94a042e283457aeeaaa0a015700f8bfd9cbbf4e2878a4ee7517360a559e367f6"} Jan 29 16:29:47 crc kubenswrapper[4685]: I0129 16:29:47.116046 4685 scope.go:117] "RemoveContainer" containerID="e8c6ddf284546a29d04b9dee272e8926ad681bbc28968208d23d5a5bfe1808ce" Jan 29 16:29:47 crc kubenswrapper[4685]: I0129 16:29:47.139310 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gn4d6"] Jan 29 16:29:47 crc kubenswrapper[4685]: I0129 16:29:47.147785 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gn4d6"] Jan 29 16:29:47 crc kubenswrapper[4685]: I0129 16:29:47.608855 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23ad2357-54ad-4029-8391-285a005ce5bf" path="/var/lib/kubelet/pods/23ad2357-54ad-4029-8391-285a005ce5bf/volumes" Jan 29 16:29:48 crc kubenswrapper[4685]: E0129 16:29:48.603790 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:29:50 crc kubenswrapper[4685]: E0129 16:29:50.604604 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:29:50 crc kubenswrapper[4685]: E0129 16:29:50.604725 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:29:51 crc kubenswrapper[4685]: E0129 16:29:51.605916 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" 
pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:29:52 crc kubenswrapper[4685]: E0129 16:29:52.604577 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.791857 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-rhplj"] Jan 29 16:29:52 crc kubenswrapper[4685]: E0129 16:29:52.792200 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ad2357-54ad-4029-8391-285a005ce5bf" containerName="oauth-openshift" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.792222 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ad2357-54ad-4029-8391-285a005ce5bf" containerName="oauth-openshift" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.792614 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="23ad2357-54ad-4029-8391-285a005ce5bf" containerName="oauth-openshift" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.793274 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.795784 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.795958 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.796227 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.796337 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.801039 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.801096 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-rhplj"] Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.801257 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.801899 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.801915 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.802074 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.802161 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 29 16:29:52 
crc kubenswrapper[4685]: I0129 16:29:52.803285 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.806938 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.807311 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.814079 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.819300 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.932784 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.932859 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.932903 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c34f923f-5c57-41d9-acee-f6db59b8ec4a-audit-dir\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.932950 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.932986 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933047 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933080 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933127 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-audit-policies\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933160 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933202 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933266 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933316 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n52kc\" (UniqueName: \"kubernetes.io/projected/c34f923f-5c57-41d9-acee-f6db59b8ec4a-kube-api-access-n52kc\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933351 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:52 crc kubenswrapper[4685]: I0129 16:29:52.933701 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035054 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035145 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n52kc\" (UniqueName: \"kubernetes.io/projected/c34f923f-5c57-41d9-acee-f6db59b8ec4a-kube-api-access-n52kc\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035185 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035214 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035254 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035291 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035327 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c34f923f-5c57-41d9-acee-f6db59b8ec4a-audit-dir\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035365 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035446 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035502 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035536 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035578 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-audit-policies\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035610 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035650 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.035680 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c34f923f-5c57-41d9-acee-f6db59b8ec4a-audit-dir\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.036929 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.037270 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.037545 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-audit-policies\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.038380 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.043277 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.043755 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.044942 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.047876 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.048660 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.048990 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.050517 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.053785 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c34f923f-5c57-41d9-acee-f6db59b8ec4a-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.059776 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n52kc\" (UniqueName: \"kubernetes.io/projected/c34f923f-5c57-41d9-acee-f6db59b8ec4a-kube-api-access-n52kc\") pod \"oauth-openshift-58d4f98775-rhplj\" (UID: \"c34f923f-5c57-41d9-acee-f6db59b8ec4a\") " pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.119258 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:53 crc kubenswrapper[4685]: I0129 16:29:53.616908 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-rhplj"] Jan 29 16:29:54 crc kubenswrapper[4685]: I0129 16:29:54.148711 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" event={"ID":"c34f923f-5c57-41d9-acee-f6db59b8ec4a","Type":"ContainerStarted","Data":"c7c7acce7374d0b81db8672f42d101b3d232f02c5452fdaeb37eaaaefa8d7978"} Jan 29 16:29:54 crc kubenswrapper[4685]: I0129 16:29:54.149018 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" event={"ID":"c34f923f-5c57-41d9-acee-f6db59b8ec4a","Type":"ContainerStarted","Data":"57f01be305e11ec205e8d1b30ff334f6d37af365c4325e0ac8dca612d88b4469"} Jan 29 16:29:54 crc kubenswrapper[4685]: I0129 16:29:54.149283 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:54 crc kubenswrapper[4685]: I0129 16:29:54.180440 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" podStartSLOduration=34.180425131 podStartE2EDuration="34.180425131s" podCreationTimestamp="2026-01-29 16:29:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:29:54.176638396 +0000 UTC m=+221.373994668" watchObservedRunningTime="2026-01-29 16:29:54.180425131 +0000 UTC m=+221.377781393" Jan 29 16:29:54 crc kubenswrapper[4685]: I0129 16:29:54.528500 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-58d4f98775-rhplj" Jan 29 16:29:54 crc kubenswrapper[4685]: E0129 16:29:54.603427 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:29:54 crc kubenswrapper[4685]: E0129 16:29:54.603710 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:29:56 crc kubenswrapper[4685]: E0129 16:29:56.604721 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.148196 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt"] Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.149903 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.154405 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt"] Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.154566 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.154730 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.241547 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-config-volume\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.241644 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pdpn\" (UniqueName: \"kubernetes.io/projected/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-kube-api-access-8pdpn\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.241750 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-secret-volume\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.343067 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-secret-volume\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.343469 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-config-volume\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.343496 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pdpn\" (UniqueName: \"kubernetes.io/projected/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-kube-api-access-8pdpn\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.344840 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-config-volume\") pod 
\"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.352291 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-secret-volume\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.359809 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pdpn\" (UniqueName: \"kubernetes.io/projected/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-kube-api-access-8pdpn\") pod \"collect-profiles-29495070-cgfgt\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.475322 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.885105 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt"] Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.963086 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-549fb6dd65-26wgw"] Jan 29 16:30:00 crc kubenswrapper[4685]: I0129 16:30:00.963351 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" podUID="cc54e59d-23eb-42f6-aa84-ebf20c711f2c" containerName="controller-manager" containerID="cri-o://65cf25947ac77232791ed80b126e2916dc9844ebdd5dd70bfa1bae730cb7c9ca" gracePeriod=30 Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.061609 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s"] Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.062259 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" podUID="846be245-527f-471e-92b8-8c3767283a38" containerName="route-controller-manager" containerID="cri-o://3f093cb5458d8f944fbde838fd564916cef2d8b58df41c1e936e16852c264873" gracePeriod=30 Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.196682 4685 generic.go:334] "Generic (PLEG): container finished" podID="cc54e59d-23eb-42f6-aa84-ebf20c711f2c" containerID="65cf25947ac77232791ed80b126e2916dc9844ebdd5dd70bfa1bae730cb7c9ca" exitCode=0 Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.196766 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" event={"ID":"cc54e59d-23eb-42f6-aa84-ebf20c711f2c","Type":"ContainerDied","Data":"65cf25947ac77232791ed80b126e2916dc9844ebdd5dd70bfa1bae730cb7c9ca"} Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.197888 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" event={"ID":"93a4a943-8dfb-4e88-ba12-d0b3f49e4590","Type":"ContainerStarted","Data":"32bfd06e153ade4241df77851151e8669cda7ce6f118fd59b65ba9776c65efa7"} Jan 29 16:30:01 
crc kubenswrapper[4685]: I0129 16:30:01.197934 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" event={"ID":"93a4a943-8dfb-4e88-ba12-d0b3f49e4590","Type":"ContainerStarted","Data":"db4f04b37a2fc53d27d836bf8c3147c4e0d1ec6a80c0d1364531b64d252a52d9"} Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.199579 4685 generic.go:334] "Generic (PLEG): container finished" podID="846be245-527f-471e-92b8-8c3767283a38" containerID="3f093cb5458d8f944fbde838fd564916cef2d8b58df41c1e936e16852c264873" exitCode=0 Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.199619 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" event={"ID":"846be245-527f-471e-92b8-8c3767283a38","Type":"ContainerDied","Data":"3f093cb5458d8f944fbde838fd564916cef2d8b58df41c1e936e16852c264873"} Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.218558 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" podStartSLOduration=1.218538547 podStartE2EDuration="1.218538547s" podCreationTimestamp="2026-01-29 16:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:30:01.217310994 +0000 UTC m=+228.414667266" watchObservedRunningTime="2026-01-29 16:30:01.218538547 +0000 UTC m=+228.415894809" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.518764 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.530579 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.561824 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-proxy-ca-bundles\") pod \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.561885 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-serving-cert\") pod \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.561913 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhqws\" (UniqueName: \"kubernetes.io/projected/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-kube-api-access-qhqws\") pod \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.561943 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgls8\" (UniqueName: \"kubernetes.io/projected/846be245-527f-471e-92b8-8c3767283a38-kube-api-access-pgls8\") pod \"846be245-527f-471e-92b8-8c3767283a38\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.561990 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-client-ca\") pod \"846be245-527f-471e-92b8-8c3767283a38\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.562056 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-client-ca\") pod \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.562099 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846be245-527f-471e-92b8-8c3767283a38-serving-cert\") pod \"846be245-527f-471e-92b8-8c3767283a38\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.562189 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-config\") pod \"846be245-527f-471e-92b8-8c3767283a38\" (UID: \"846be245-527f-471e-92b8-8c3767283a38\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.562214 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-config\") pod \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\" (UID: \"cc54e59d-23eb-42f6-aa84-ebf20c711f2c\") " Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.562837 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-client-ca" (OuterVolumeSpecName: "client-ca") pod "846be245-527f-471e-92b8-8c3767283a38" 
(UID: "846be245-527f-471e-92b8-8c3767283a38"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.562855 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-client-ca" (OuterVolumeSpecName: "client-ca") pod "cc54e59d-23eb-42f6-aa84-ebf20c711f2c" (UID: "cc54e59d-23eb-42f6-aa84-ebf20c711f2c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.563042 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "cc54e59d-23eb-42f6-aa84-ebf20c711f2c" (UID: "cc54e59d-23eb-42f6-aa84-ebf20c711f2c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.563069 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-config" (OuterVolumeSpecName: "config") pod "cc54e59d-23eb-42f6-aa84-ebf20c711f2c" (UID: "cc54e59d-23eb-42f6-aa84-ebf20c711f2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.563580 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-config" (OuterVolumeSpecName: "config") pod "846be245-527f-471e-92b8-8c3767283a38" (UID: "846be245-527f-471e-92b8-8c3767283a38"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.574273 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846be245-527f-471e-92b8-8c3767283a38-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "846be245-527f-471e-92b8-8c3767283a38" (UID: "846be245-527f-471e-92b8-8c3767283a38"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.578644 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/846be245-527f-471e-92b8-8c3767283a38-kube-api-access-pgls8" (OuterVolumeSpecName: "kube-api-access-pgls8") pod "846be245-527f-471e-92b8-8c3767283a38" (UID: "846be245-527f-471e-92b8-8c3767283a38"). InnerVolumeSpecName "kube-api-access-pgls8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.578768 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-kube-api-access-qhqws" (OuterVolumeSpecName: "kube-api-access-qhqws") pod "cc54e59d-23eb-42f6-aa84-ebf20c711f2c" (UID: "cc54e59d-23eb-42f6-aa84-ebf20c711f2c"). InnerVolumeSpecName "kube-api-access-qhqws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.588773 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cc54e59d-23eb-42f6-aa84-ebf20c711f2c" (UID: "cc54e59d-23eb-42f6-aa84-ebf20c711f2c"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:30:01 crc kubenswrapper[4685]: E0129 16:30:01.606144 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:30:01 crc kubenswrapper[4685]: E0129 16:30:01.607905 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663666 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663698 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663709 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663719 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663729 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhqws\" (UniqueName: \"kubernetes.io/projected/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-kube-api-access-qhqws\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663738 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgls8\" (UniqueName: \"kubernetes.io/projected/846be245-527f-471e-92b8-8c3767283a38-kube-api-access-pgls8\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663746 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/846be245-527f-471e-92b8-8c3767283a38-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663753 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc54e59d-23eb-42f6-aa84-ebf20c711f2c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:01 crc kubenswrapper[4685]: I0129 16:30:01.663761 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/846be245-527f-471e-92b8-8c3767283a38-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.206414 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" 
event={"ID":"cc54e59d-23eb-42f6-aa84-ebf20c711f2c","Type":"ContainerDied","Data":"db6733569fe0cb3348ca02beeff32a1b2518ffdb689bcb740f6daa8cb2bae3b0"} Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.206462 4685 scope.go:117] "RemoveContainer" containerID="65cf25947ac77232791ed80b126e2916dc9844ebdd5dd70bfa1bae730cb7c9ca" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.206473 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-549fb6dd65-26wgw" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.210539 4685 generic.go:334] "Generic (PLEG): container finished" podID="93a4a943-8dfb-4e88-ba12-d0b3f49e4590" containerID="32bfd06e153ade4241df77851151e8669cda7ce6f118fd59b65ba9776c65efa7" exitCode=0 Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.210599 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" event={"ID":"93a4a943-8dfb-4e88-ba12-d0b3f49e4590","Type":"ContainerDied","Data":"32bfd06e153ade4241df77851151e8669cda7ce6f118fd59b65ba9776c65efa7"} Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.212710 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" event={"ID":"846be245-527f-471e-92b8-8c3767283a38","Type":"ContainerDied","Data":"bef0eec3f180d6701ffa4676f9a73cbdcac8438d442fe6aafd78a2e6d68d930d"} Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.212813 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.243127 4685 scope.go:117] "RemoveContainer" containerID="3f093cb5458d8f944fbde838fd564916cef2d8b58df41c1e936e16852c264873" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.252997 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s"] Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.257109 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb4f98fc6-4p85s"] Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.261720 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-549fb6dd65-26wgw"] Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.264791 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-549fb6dd65-26wgw"] Jan 29 16:30:02 crc kubenswrapper[4685]: E0129 16:30:02.604330 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.798913 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-797f94f54c-rnfc8"] Jan 29 16:30:02 crc kubenswrapper[4685]: E0129 16:30:02.799500 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846be245-527f-471e-92b8-8c3767283a38" containerName="route-controller-manager" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.799524 4685 
state_mem.go:107] "Deleted CPUSet assignment" podUID="846be245-527f-471e-92b8-8c3767283a38" containerName="route-controller-manager" Jan 29 16:30:02 crc kubenswrapper[4685]: E0129 16:30:02.799547 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc54e59d-23eb-42f6-aa84-ebf20c711f2c" containerName="controller-manager" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.799555 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc54e59d-23eb-42f6-aa84-ebf20c711f2c" containerName="controller-manager" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.799880 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc54e59d-23eb-42f6-aa84-ebf20c711f2c" containerName="controller-manager" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.799895 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="846be245-527f-471e-92b8-8c3767283a38" containerName="route-controller-manager" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.800520 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.803773 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.805126 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.805501 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.807549 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.818980 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.819069 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m"] Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.819917 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.820520 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.820678 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.821945 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.822342 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.822887 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.822974 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.823021 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.825964 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.832913 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m"] Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.839107 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-797f94f54c-rnfc8"] Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879105 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf996\" (UniqueName: \"kubernetes.io/projected/0611a6d4-4df0-4eb5-97f1-a2526c39452e-kube-api-access-mf996\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879184 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcae854b-f57f-498c-92bb-5bc7114d6eb0-serving-cert\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879219 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-client-ca\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879312 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t97t\" (UniqueName: 
\"kubernetes.io/projected/bcae854b-f57f-498c-92bb-5bc7114d6eb0-kube-api-access-2t97t\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879355 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0611a6d4-4df0-4eb5-97f1-a2526c39452e-config\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879379 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-proxy-ca-bundles\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879430 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0611a6d4-4df0-4eb5-97f1-a2526c39452e-serving-cert\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879473 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0611a6d4-4df0-4eb5-97f1-a2526c39452e-client-ca\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.879500 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-config\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980309 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0611a6d4-4df0-4eb5-97f1-a2526c39452e-serving-cert\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980367 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0611a6d4-4df0-4eb5-97f1-a2526c39452e-client-ca\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980406 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-config\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980429 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf996\" (UniqueName: \"kubernetes.io/projected/0611a6d4-4df0-4eb5-97f1-a2526c39452e-kube-api-access-mf996\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980456 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcae854b-f57f-498c-92bb-5bc7114d6eb0-serving-cert\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980472 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-client-ca\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980514 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t97t\" (UniqueName: \"kubernetes.io/projected/bcae854b-f57f-498c-92bb-5bc7114d6eb0-kube-api-access-2t97t\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980531 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0611a6d4-4df0-4eb5-97f1-a2526c39452e-config\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.980546 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-proxy-ca-bundles\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.981516 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-proxy-ca-bundles\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.984073 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-client-ca\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " 
pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.984465 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0611a6d4-4df0-4eb5-97f1-a2526c39452e-config\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.984588 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0611a6d4-4df0-4eb5-97f1-a2526c39452e-client-ca\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.985606 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcae854b-f57f-498c-92bb-5bc7114d6eb0-config\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:02 crc kubenswrapper[4685]: I0129 16:30:02.991166 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0611a6d4-4df0-4eb5-97f1-a2526c39452e-serving-cert\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.003232 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcae854b-f57f-498c-92bb-5bc7114d6eb0-serving-cert\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.004302 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t97t\" (UniqueName: \"kubernetes.io/projected/bcae854b-f57f-498c-92bb-5bc7114d6eb0-kube-api-access-2t97t\") pod \"controller-manager-797f94f54c-rnfc8\" (UID: \"bcae854b-f57f-498c-92bb-5bc7114d6eb0\") " pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.005242 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf996\" (UniqueName: \"kubernetes.io/projected/0611a6d4-4df0-4eb5-97f1-a2526c39452e-kube-api-access-mf996\") pod \"route-controller-manager-588bb7d8f-8hc4m\" (UID: \"0611a6d4-4df0-4eb5-97f1-a2526c39452e\") " pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.140054 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.146802 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.564774 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-797f94f54c-rnfc8"] Jan 29 16:30:03 crc kubenswrapper[4685]: W0129 16:30:03.569605 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcae854b_f57f_498c_92bb_5bc7114d6eb0.slice/crio-bd8ef5d453f3e1b326528662d25a5ddc0fc0aafe6647fb84c1a30954db8e582a WatchSource:0}: Error finding container bd8ef5d453f3e1b326528662d25a5ddc0fc0aafe6647fb84c1a30954db8e582a: Status 404 returned error can't find the container with id bd8ef5d453f3e1b326528662d25a5ddc0fc0aafe6647fb84c1a30954db8e582a Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.571139 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m"] Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.579405 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.588782 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-secret-volume\") pod \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.588823 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pdpn\" (UniqueName: \"kubernetes.io/projected/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-kube-api-access-8pdpn\") pod \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.588954 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-config-volume\") pod \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\" (UID: \"93a4a943-8dfb-4e88-ba12-d0b3f49e4590\") " Jan 29 16:30:03 crc kubenswrapper[4685]: W0129 16:30:03.589229 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0611a6d4_4df0_4eb5_97f1_a2526c39452e.slice/crio-e4cf2df2bc4f65cf5418de6a7b815eaea6616a195f2b3260b92b1b03f9ffdb6b WatchSource:0}: Error finding container e4cf2df2bc4f65cf5418de6a7b815eaea6616a195f2b3260b92b1b03f9ffdb6b: Status 404 returned error can't find the container with id e4cf2df2bc4f65cf5418de6a7b815eaea6616a195f2b3260b92b1b03f9ffdb6b Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.589633 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-config-volume" (OuterVolumeSpecName: "config-volume") pod "93a4a943-8dfb-4e88-ba12-d0b3f49e4590" (UID: "93a4a943-8dfb-4e88-ba12-d0b3f49e4590"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.595502 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "93a4a943-8dfb-4e88-ba12-d0b3f49e4590" (UID: "93a4a943-8dfb-4e88-ba12-d0b3f49e4590"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.595803 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-kube-api-access-8pdpn" (OuterVolumeSpecName: "kube-api-access-8pdpn") pod "93a4a943-8dfb-4e88-ba12-d0b3f49e4590" (UID: "93a4a943-8dfb-4e88-ba12-d0b3f49e4590"). InnerVolumeSpecName "kube-api-access-8pdpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.622618 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846be245-527f-471e-92b8-8c3767283a38" path="/var/lib/kubelet/pods/846be245-527f-471e-92b8-8c3767283a38/volumes" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.624741 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc54e59d-23eb-42f6-aa84-ebf20c711f2c" path="/var/lib/kubelet/pods/cc54e59d-23eb-42f6-aa84-ebf20c711f2c/volumes" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.690369 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-config-volume\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.690455 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:03 crc kubenswrapper[4685]: I0129 16:30:03.690468 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pdpn\" (UniqueName: \"kubernetes.io/projected/93a4a943-8dfb-4e88-ba12-d0b3f49e4590-kube-api-access-8pdpn\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.235627 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" event={"ID":"0611a6d4-4df0-4eb5-97f1-a2526c39452e","Type":"ContainerStarted","Data":"d4d00dfec125367ef43ffb33c2a044b9fa4cf2e4f44443c7a276dca64fc4ba4a"} Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.235668 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" event={"ID":"0611a6d4-4df0-4eb5-97f1-a2526c39452e","Type":"ContainerStarted","Data":"e4cf2df2bc4f65cf5418de6a7b815eaea6616a195f2b3260b92b1b03f9ffdb6b"} Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.236096 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.237579 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" event={"ID":"93a4a943-8dfb-4e88-ba12-d0b3f49e4590","Type":"ContainerDied","Data":"db4f04b37a2fc53d27d836bf8c3147c4e0d1ec6a80c0d1364531b64d252a52d9"} Jan 29 16:30:04 crc 
kubenswrapper[4685]: I0129 16:30:04.237605 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db4f04b37a2fc53d27d836bf8c3147c4e0d1ec6a80c0d1364531b64d252a52d9" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.237626 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495070-cgfgt" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.239586 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" event={"ID":"bcae854b-f57f-498c-92bb-5bc7114d6eb0","Type":"ContainerStarted","Data":"3c846645ebf5aca6acbb81ade6fa7e3339eb60fdfd8f87bc5a9931e9f27e5dea"} Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.239632 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" event={"ID":"bcae854b-f57f-498c-92bb-5bc7114d6eb0","Type":"ContainerStarted","Data":"bd8ef5d453f3e1b326528662d25a5ddc0fc0aafe6647fb84c1a30954db8e582a"} Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.239823 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.253564 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.254151 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.256783 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-588bb7d8f-8hc4m" podStartSLOduration=3.256761427 podStartE2EDuration="3.256761427s" podCreationTimestamp="2026-01-29 16:30:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:30:04.253040004 +0000 UTC m=+231.450396266" watchObservedRunningTime="2026-01-29 16:30:04.256761427 +0000 UTC m=+231.454117689" Jan 29 16:30:04 crc kubenswrapper[4685]: I0129 16:30:04.274159 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-797f94f54c-rnfc8" podStartSLOduration=4.274144555 podStartE2EDuration="4.274144555s" podCreationTimestamp="2026-01-29 16:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:30:04.273168821 +0000 UTC m=+231.470525093" watchObservedRunningTime="2026-01-29 16:30:04.274144555 +0000 UTC m=+231.471500817" Jan 29 16:30:04 crc kubenswrapper[4685]: E0129 16:30:04.603347 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:30:04 crc kubenswrapper[4685]: E0129 16:30:04.603430 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:30:05 crc kubenswrapper[4685]: E0129 16:30:05.604921 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.175266 4685 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.175704 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93a4a943-8dfb-4e88-ba12-d0b3f49e4590" containerName="collect-profiles" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.175735 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="93a4a943-8dfb-4e88-ba12-d0b3f49e4590" containerName="collect-profiles" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.175982 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="93a4a943-8dfb-4e88-ba12-d0b3f49e4590" containerName="collect-profiles" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.176637 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183171 4685 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183236 4685 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.183572 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183605 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.183626 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183641 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.183671 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183689 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.183707 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183719 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 29 
16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.183734 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183746 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.183764 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183777 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.183795 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183806 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183967 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.183989 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.184007 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.184021 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.184043 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.184060 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.184262 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.184290 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.184518 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.185981 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462" gracePeriod=15 Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.186677 4685 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3" gracePeriod=15 Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.186866 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab" gracePeriod=15 Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.186887 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad" gracePeriod=15 Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.186944 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6" gracePeriod=15 Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.220234 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332510 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332564 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332648 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332672 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332701 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod 
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332723 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332755 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.332777 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434264 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434388 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434505 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434531 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434546 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434439 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434611 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434635 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434669 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434725 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434739 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434799 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434825 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434828 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434855 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.434887 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: I0129 16:30:07.514522 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:07 crc kubenswrapper[4685]: W0129 16:30:07.533021 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-df20207ae9c8b9de42c10a03be0c63fe99ce9544ebe2fb0c9139d56658904666 WatchSource:0}: Error finding container df20207ae9c8b9de42c10a03be0c63fe99ce9544ebe2fb0c9139d56658904666: Status 404 returned error can't find the container with id df20207ae9c8b9de42c10a03be0c63fe99ce9544ebe2fb0c9139d56658904666 Jan 29 16:30:07 crc kubenswrapper[4685]: E0129 16:30:07.535982 4685 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f4099b7a386df openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-29 16:30:07.535015647 +0000 UTC m=+234.732371919,LastTimestamp:2026-01-29 16:30:07.535015647 +0000 UTC m=+234.732371919,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.259353 4685 generic.go:334] "Generic (PLEG): container finished" podID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" containerID="0706e6ec01f763a1f2ea147749ba8bb951a4d691b25108d0566edb083a7b8b2e" exitCode=0 Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.259459 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b2572ad-c614-42ee-88c2-6e8670ba17f8","Type":"ContainerDied","Data":"0706e6ec01f763a1f2ea147749ba8bb951a4d691b25108d0566edb083a7b8b2e"} Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.260823 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.261166 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.262074 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.263610 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.264233 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3" exitCode=0 Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.264254 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab" exitCode=0 Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.264265 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad" exitCode=0 Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.264276 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6" exitCode=2 Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.264373 4685 scope.go:117] "RemoveContainer" containerID="4384141be4e9e4a6ef06d15f771bb0be1bce8d98b90975bf322aa8c832ee272b" Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.265516 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f"} Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.265555 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"df20207ae9c8b9de42c10a03be0c63fe99ce9544ebe2fb0c9139d56658904666"} Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.266139 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:08 crc kubenswrapper[4685]: I0129 16:30:08.266576 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:08 crc kubenswrapper[4685]: E0129 16:30:08.721567 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing 
source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:30:08 crc kubenswrapper[4685]: E0129 16:30:08.721786 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ljn4n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9wff8_openshift-marketplace(c9eff178-9b35-42e8-a94d-ecb975269482): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:08 crc kubenswrapper[4685]: E0129 16:30:08.723010 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.289415 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.776711 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.777205 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.777357 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.899950 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kube-api-access\") pod \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.900328 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-var-lock\") pod \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.900349 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-var-lock" (OuterVolumeSpecName: "var-lock") pod "4b2572ad-c614-42ee-88c2-6e8670ba17f8" (UID: "4b2572ad-c614-42ee-88c2-6e8670ba17f8"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.900400 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kubelet-dir\") pod \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\" (UID: \"4b2572ad-c614-42ee-88c2-6e8670ba17f8\") " Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.900509 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4b2572ad-c614-42ee-88c2-6e8670ba17f8" (UID: "4b2572ad-c614-42ee-88c2-6e8670ba17f8"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.900592 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.900604 4685 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b2572ad-c614-42ee-88c2-6e8670ba17f8-var-lock\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:09 crc kubenswrapper[4685]: I0129 16:30:09.905071 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4b2572ad-c614-42ee-88c2-6e8670ba17f8" (UID: "4b2572ad-c614-42ee-88c2-6e8670ba17f8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.002038 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b2572ad-c614-42ee-88c2-6e8670ba17f8-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.145949 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.147125 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.147582 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.147989 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.148306 4685 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.297437 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.297433 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b2572ad-c614-42ee-88c2-6e8670ba17f8","Type":"ContainerDied","Data":"079ac309498ab23350d57edb0fd2e03b0de4e683866f02dddc62575846703737"} Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.297766 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="079ac309498ab23350d57edb0fd2e03b0de4e683866f02dddc62575846703737" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.299976 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.300599 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462" exitCode=0 Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.300663 4685 scope.go:117] "RemoveContainer" containerID="f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.300693 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305048 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305253 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305351 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305538 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305436 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305644 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305785 4685 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305808 4685 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.305816 4685 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.310701 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.311462 4685 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.311822 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.318412 4685 scope.go:117] "RemoveContainer" containerID="50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.337585 4685 scope.go:117] "RemoveContainer" containerID="f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.355655 4685 scope.go:117] "RemoveContainer" containerID="88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.366950 4685 scope.go:117] "RemoveContainer" containerID="254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.381086 4685 scope.go:117] "RemoveContainer" containerID="6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.399432 4685 scope.go:117] "RemoveContainer" containerID="f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3" Jan 29 16:30:10 crc kubenswrapper[4685]: E0129 
16:30:10.399823 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\": container with ID starting with f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3 not found: ID does not exist" containerID="f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.399861 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3"} err="failed to get container status \"f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\": rpc error: code = NotFound desc = could not find container \"f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3\": container with ID starting with f1ddf18b59368b97be7dd0fea6fae71dee7f7ae68f6e32ea46ed9c2961eae4f3 not found: ID does not exist" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.399887 4685 scope.go:117] "RemoveContainer" containerID="50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab" Jan 29 16:30:10 crc kubenswrapper[4685]: E0129 16:30:10.400771 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\": container with ID starting with 50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab not found: ID does not exist" containerID="50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.400818 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab"} err="failed to get container status \"50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\": rpc error: code = NotFound desc = could not find container \"50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab\": container with ID starting with 50190f3e73328b776243c87a024161e07a79dbdb5a8a43762658cfe3a0e228ab not found: ID does not exist" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.400834 4685 scope.go:117] "RemoveContainer" containerID="f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad" Jan 29 16:30:10 crc kubenswrapper[4685]: E0129 16:30:10.401075 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\": container with ID starting with f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad not found: ID does not exist" containerID="f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.401094 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad"} err="failed to get container status \"f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\": rpc error: code = NotFound desc = could not find container \"f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad\": container with ID starting with f9ad47858faad6f89f5598410ff46dda4c7ca0cdaa817028cfe3cabee96495ad not found: ID does not exist" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.401108 4685 
scope.go:117] "RemoveContainer" containerID="88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6" Jan 29 16:30:10 crc kubenswrapper[4685]: E0129 16:30:10.401466 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\": container with ID starting with 88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6 not found: ID does not exist" containerID="88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.401486 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6"} err="failed to get container status \"88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\": rpc error: code = NotFound desc = could not find container \"88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6\": container with ID starting with 88afb091c71bdb4d065b55b73b8f141d2c8e4a438617e3b2e7c0b5f9833fe4c6 not found: ID does not exist" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.401554 4685 scope.go:117] "RemoveContainer" containerID="254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462" Jan 29 16:30:10 crc kubenswrapper[4685]: E0129 16:30:10.401756 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\": container with ID starting with 254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462 not found: ID does not exist" containerID="254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.401778 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462"} err="failed to get container status \"254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\": rpc error: code = NotFound desc = could not find container \"254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462\": container with ID starting with 254bf73180b1ef36cc0354278086edac15add8b26fa17115f0abc79b6d5fd462 not found: ID does not exist" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.401790 4685 scope.go:117] "RemoveContainer" containerID="6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1" Jan 29 16:30:10 crc kubenswrapper[4685]: E0129 16:30:10.401951 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\": container with ID starting with 6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1 not found: ID does not exist" containerID="6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.401969 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1"} err="failed to get container status \"6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\": rpc error: code = NotFound desc = could not find container \"6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1\": container with ID starting with 
6d9895fc38f1b99b2f46cd3da749e0059b34a701a90df2a7138e826ac1e921b1 not found: ID does not exist" Jan 29 16:30:10 crc kubenswrapper[4685]: E0129 16:30:10.603105 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.623394 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.623710 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:10 crc kubenswrapper[4685]: I0129 16:30:10.624054 4685 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:11 crc kubenswrapper[4685]: I0129 16:30:11.610382 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.349637 4685 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f4099b7a386df openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-29 16:30:07.535015647 +0000 UTC m=+234.732371919,LastTimestamp:2026-01-29 16:30:07.535015647 +0000 UTC m=+234.732371919,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 29 16:30:13 crc kubenswrapper[4685]: I0129 16:30:13.605316 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:13 crc kubenswrapper[4685]: I0129 
16:30:13.605682 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.659753 4685 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.660135 4685 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.660477 4685 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.660800 4685 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.661151 4685 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:13 crc kubenswrapper[4685]: I0129 16:30:13.661194 4685 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.661484 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="200ms" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.736564 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.736715 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2ztg7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ksvjv_openshift-marketplace(b1edc1c3-3a71-4117-bf81-b3e95b8d2230): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.737784 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:30:13 crc kubenswrapper[4685]: E0129 16:30:13.862459 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="400ms" Jan 29 16:30:14 crc kubenswrapper[4685]: E0129 16:30:14.263009 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="800ms" Jan 29 16:30:15 crc kubenswrapper[4685]: E0129 16:30:15.064082 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="1.6s" Jan 29 16:30:15 crc kubenswrapper[4685]: E0129 16:30:15.730893 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:30:15 crc kubenswrapper[4685]: E0129 16:30:15.731070 4685 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zxbbn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7x5cm_openshift-marketplace(48e58904-04a7-4e69-91a4-61118022ec0b): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:15 crc kubenswrapper[4685]: E0129 16:30:15.732292 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.664898 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="3.2s" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.734628 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.734911 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkfdq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-8h7mh_openshift-marketplace(955e6277-1f7d-4d59-8987-3618ed745fc5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.735023 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.735126 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vg486,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-k8cws_openshift-marketplace(13f56e43-a938-4682-9b5a-14fe7af129e5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.735477 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.735587 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nl2j7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b7nt8_openshift-marketplace(907873d4-fae4-4bc1-b7ff-a56b329f47a8): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.736747 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.736806 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:30:16 crc kubenswrapper[4685]: E0129 16:30:16.736833 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:30:19 crc kubenswrapper[4685]: I0129 16:30:19.601154 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:19 crc kubenswrapper[4685]: I0129 16:30:19.603503 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:19 crc kubenswrapper[4685]: I0129 16:30:19.604245 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:19 crc kubenswrapper[4685]: I0129 16:30:19.625799 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:19 crc kubenswrapper[4685]: I0129 16:30:19.626085 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:19 crc kubenswrapper[4685]: E0129 16:30:19.626599 4685 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:19 crc kubenswrapper[4685]: I0129 16:30:19.627294 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:19 crc kubenswrapper[4685]: W0129 16:30:19.651823 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-be1635b1a433d2b887749f08e00fd6b94286a7fa6b7f7cbba1d4af70c18c0b3c WatchSource:0}: Error finding container be1635b1a433d2b887749f08e00fd6b94286a7fa6b7f7cbba1d4af70c18c0b3c: Status 404 returned error can't find the container with id be1635b1a433d2b887749f08e00fd6b94286a7fa6b7f7cbba1d4af70c18c0b3c Jan 29 16:30:19 crc kubenswrapper[4685]: E0129 16:30:19.734553 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:30:19 crc kubenswrapper[4685]: E0129 16:30:19.734717 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sjpfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pk7q7_openshift-marketplace(e14e2172-5fc7-4b65-90fe-4060e571eee5): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:19 crc kubenswrapper[4685]: E0129 16:30:19.735949 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:30:19 crc kubenswrapper[4685]: E0129 16:30:19.865709 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="6.4s" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.360788 4685 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="7d459edb90570bead1119c1f5bff9987d3fb448043b7d8519e1295d3baebafdd" exitCode=0 Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.360887 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"7d459edb90570bead1119c1f5bff9987d3fb448043b7d8519e1295d3baebafdd"} Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.360926 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"be1635b1a433d2b887749f08e00fd6b94286a7fa6b7f7cbba1d4af70c18c0b3c"} Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.361251 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 
16:30:20.361268 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.361932 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: E0129 16:30:20.361997 4685 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.362222 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.365212 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.365266 4685 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe" exitCode=1 Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.365292 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe"} Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.365806 4685 scope.go:117] "RemoveContainer" containerID="1328d4e38b0299abd7595a16d1751a1c24499bbaec5ee8a0e1739d6bd2c08bbe" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.366612 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.367166 4685 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.367823 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 
29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.602099 4685 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.602725 4685 status_manager.go:851] "Failed to get status for pod" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.603035 4685 status_manager.go:851] "Failed to get status for pod" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" pod="openshift-marketplace/redhat-operators-9wff8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-9wff8\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: I0129 16:30:20.603324 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Jan 29 16:30:20 crc kubenswrapper[4685]: E0129 16:30:20.603824 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:30:21 crc kubenswrapper[4685]: I0129 16:30:21.372479 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5ae667f607e541931895f446fbd736a025fa54f0920d317cb3aebd9ec8d80df6"} Jan 29 16:30:21 crc kubenswrapper[4685]: I0129 16:30:21.372541 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8e66e7419cc5b2e0e0eb64b42750f9cda1a71d2122d918c91099da96445b269b"} Jan 29 16:30:21 crc kubenswrapper[4685]: I0129 16:30:21.372562 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bd83996fffde32ac869d5233dddf445d1e12cc3c2f4e5047d7138c735a8ae98f"} Jan 29 16:30:21 crc kubenswrapper[4685]: I0129 16:30:21.375847 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 29 16:30:21 crc kubenswrapper[4685]: I0129 16:30:21.375922 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"275e6b62386bb21c5281973cd06ffb08a4ca9617d39148db103326e2df72d51e"} Jan 29 16:30:22 crc 
kubenswrapper[4685]: I0129 16:30:22.400043 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"880179ddd5a05cc39b75fac2971ba363570fa4f0503688b797018d2c2433975c"} Jan 29 16:30:22 crc kubenswrapper[4685]: I0129 16:30:22.400086 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"cb08fd7146b7f49cc1e5996eea465c0f21fd32d897ae5ffe6c0f16012fe3fcb8"} Jan 29 16:30:22 crc kubenswrapper[4685]: I0129 16:30:22.400285 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:22 crc kubenswrapper[4685]: I0129 16:30:22.400368 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:22 crc kubenswrapper[4685]: I0129 16:30:22.400409 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:23 crc kubenswrapper[4685]: I0129 16:30:23.700230 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:30:23 crc kubenswrapper[4685]: I0129 16:30:23.704934 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:30:24 crc kubenswrapper[4685]: I0129 16:30:24.411173 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:30:24 crc kubenswrapper[4685]: I0129 16:30:24.628515 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:24 crc kubenswrapper[4685]: I0129 16:30:24.628652 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:24 crc kubenswrapper[4685]: I0129 16:30:24.633572 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:24 crc kubenswrapper[4685]: E0129 16:30:24.720743 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:30:24 crc kubenswrapper[4685]: E0129 16:30:24.720966 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cm2t8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-sjjwk_openshift-marketplace(bcc954ee-61b9-492c-b375-2f271c18201c): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:30:24 crc kubenswrapper[4685]: E0129 16:30:24.722156 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:30:25 crc kubenswrapper[4685]: E0129 16:30:25.603793 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:30:27 crc kubenswrapper[4685]: I0129 16:30:27.413641 4685 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:28 crc kubenswrapper[4685]: I0129 16:30:28.435170 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:28 crc kubenswrapper[4685]: I0129 16:30:28.435200 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:28 crc kubenswrapper[4685]: I0129 16:30:28.440525 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:28 crc kubenswrapper[4685]: I0129 16:30:28.443324 4685 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fdc2ccd5-f0d3-4da8-85f0-15dffd32043c" Jan 29 16:30:28 crc 
kubenswrapper[4685]: E0129 16:30:28.603505 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:30:29 crc kubenswrapper[4685]: I0129 16:30:29.441124 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:29 crc kubenswrapper[4685]: I0129 16:30:29.441170 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d4635678-889c-4e33-82fc-fb316db41117" Jan 29 16:30:29 crc kubenswrapper[4685]: E0129 16:30:29.603559 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8h7mh" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:30:29 crc kubenswrapper[4685]: E0129 16:30:29.603616 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:30:30 crc kubenswrapper[4685]: E0129 16:30:30.602855 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:30:33 crc kubenswrapper[4685]: E0129 16:30:33.616611 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:30:33 crc kubenswrapper[4685]: I0129 16:30:33.637932 4685 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fdc2ccd5-f0d3-4da8-85f0-15dffd32043c" Jan 29 16:30:34 crc kubenswrapper[4685]: E0129 16:30:34.603466 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:30:35 crc kubenswrapper[4685]: I0129 16:30:35.652149 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 29 16:30:36 crc kubenswrapper[4685]: E0129 16:30:36.604298 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:30:36 crc kubenswrapper[4685]: E0129 16:30:36.605100 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:30:36 crc kubenswrapper[4685]: I0129 16:30:36.862525 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 29 16:30:37 crc kubenswrapper[4685]: I0129 16:30:37.624942 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 29 16:30:37 crc kubenswrapper[4685]: I0129 16:30:37.804731 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 29 16:30:37 crc kubenswrapper[4685]: I0129 16:30:37.849944 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 29 16:30:38 crc kubenswrapper[4685]: I0129 16:30:38.159210 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 29 16:30:38 crc kubenswrapper[4685]: I0129 16:30:38.336484 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 29 16:30:38 crc kubenswrapper[4685]: I0129 16:30:38.419173 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 29 16:30:38 crc kubenswrapper[4685]: I0129 16:30:38.765291 4685 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 29 16:30:38 crc kubenswrapper[4685]: I0129 16:30:38.951206 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.061158 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.080931 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.115546 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.194658 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.259537 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.721930 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.831522 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.933322 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 29 16:30:39 crc kubenswrapper[4685]: I0129 16:30:39.967373 4685 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 29 16:30:40 crc kubenswrapper[4685]: I0129 16:30:40.020632 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 29 16:30:40 crc kubenswrapper[4685]: I0129 16:30:40.120942 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 29 16:30:40 crc kubenswrapper[4685]: I0129 16:30:40.220533 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 29 16:30:40 crc kubenswrapper[4685]: I0129 16:30:40.405902 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 29 16:30:40 crc kubenswrapper[4685]: I0129 16:30:40.447375 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 29 16:30:40 crc kubenswrapper[4685]: E0129 16:30:40.605545 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x5cm" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" Jan 29 16:30:40 crc kubenswrapper[4685]: I0129 16:30:40.779243 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.083951 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.146704 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.154563 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.155635 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.355421 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 29 16:30:41 crc kubenswrapper[4685]: E0129 16:30:41.604314 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b7nt8" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.627917 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.641093 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.656161 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"openshift-service-ca.crt" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.668460 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.760817 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.800220 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.937849 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 29 16:30:41 crc kubenswrapper[4685]: I0129 16:30:41.954873 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.121338 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.126272 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.151025 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.152649 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.248818 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.298147 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.333268 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.334319 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.342616 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.352046 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.420467 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.530861 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.541077 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.559565 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" 
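
The repeated failures above of the form "initializing source docker://registry.redhat.io/...: Requesting bearer token: invalid status code from registry 403 (Forbidden)" refer to the standard Docker Registry v2 token handshake that the image pull goes through before fetching the catalog index images. The short Python sketch below is purely illustrative and is not anything the kubelet or CRI-O runs: it reproduces the handshake under the assumption of a standard WWW-Authenticate Bearer challenge, takes the repository name from the failing image reference in the log, and deliberately omits credentials (the real pulls present the cluster pull secret as basic auth to the token service, which is the step returning 403 here). The function names and constants are mine, chosen only for illustration.

#!/usr/bin/env python3
"""Illustrative sketch of the registry v2 bearer-token handshake (anonymous)."""
import re
import urllib.error
import urllib.parse
import urllib.request

REGISTRY = "registry.redhat.io"
REPOSITORY = "redhat/certified-operator-index"   # from the failing image reference in the log


def bearer_challenge(registry: str) -> dict:
    """GET /v2/ and parse the Bearer challenge (realm, service) from the 401 response."""
    try:
        urllib.request.urlopen(f"https://{registry}/v2/")
    except urllib.error.HTTPError as err:
        header = err.headers.get("WWW-Authenticate", "")
        return dict(re.findall(r'(\w+)="([^"]*)"', header))
    return {}


def request_token(challenge: dict, repository: str) -> int:
    """Ask the token service for a pull-scoped token and return the HTTP status.

    A 403 at this step is what the log reports as
    'Requesting bearer token: invalid status code from registry 403 (Forbidden)':
    the presented (or missing) credentials are not entitled to this repository.
    """
    if "realm" not in challenge:
        raise SystemExit("registry did not issue a Bearer challenge")
    query = urllib.parse.urlencode({
        "service": challenge.get("service", ""),
        "scope": f"repository:{repository}:pull",
    })
    try:
        with urllib.request.urlopen(f"{challenge['realm']}?{query}") as resp:
            return resp.status
    except urllib.error.HTTPError as err:
        return err.code


if __name__ == "__main__":
    ch = bearer_challenge(REGISTRY)
    print("token service status:", request_token(ch, REPOSITORY))

Run anonymously this only shows whatever status the token service gives unauthenticated clients; the point is the two-step shape of the exchange (challenge, then scoped token request), which is the step failing for all four catalog index images above.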
Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.618296 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.622973 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.666591 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.689619 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.736475 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.773944 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.897624 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 29 16:30:42 crc kubenswrapper[4685]: I0129 16:30:42.998238 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.032139 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.090826 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.156069 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.177804 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.208500 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.208789 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.365298 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.380713 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.468665 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.530925 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.556175 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.628049 4685 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.758114 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.774069 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.859827 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.896356 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.930501 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.938142 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 29 16:30:43 crc kubenswrapper[4685]: I0129 16:30:43.969552 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.003284 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.126581 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.127302 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.127794 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.166339 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.306334 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.332584 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.397820 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.529855 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 29 16:30:44 crc kubenswrapper[4685]: E0129 16:30:44.603236 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8h7mh" 
podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.615669 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.651492 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.776806 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.784212 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.788621 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.802310 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.825881 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.871673 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.878330 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.916842 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.967145 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 29 16:30:44 crc kubenswrapper[4685]: I0129 16:30:44.996698 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.085132 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.142215 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.181886 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.206442 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.247335 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.251467 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.284756 4685 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.364967 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.440761 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.506328 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.511839 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.573458 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 29 16:30:45 crc kubenswrapper[4685]: E0129 16:30:45.605607 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k8cws" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.608467 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.674067 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.693171 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.697625 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.705222 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.765251 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 29 16:30:45 crc kubenswrapper[4685]: I0129 16:30:45.879422 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.116281 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.225517 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.227580 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.274177 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.312766 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-operator"/"iptables-alerter-script" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.474985 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.476883 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.544440 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.616648 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.639312 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.639582 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.640811 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.650410 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.693723 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.851677 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.880793 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.973843 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 29 16:30:46 crc kubenswrapper[4685]: I0129 16:30:46.987154 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.078295 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.089856 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.113918 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.156111 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.161157 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.178279 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 29 16:30:47 crc 
kubenswrapper[4685]: I0129 16:30:47.243792 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.243900 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.300099 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.377055 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.383789 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.467352 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.471374 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.543038 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.550375 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 29 16:30:47 crc kubenswrapper[4685]: E0129 16:30:47.605020 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pk7q7" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" Jan 29 16:30:47 crc kubenswrapper[4685]: E0129 16:30:47.605020 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9wff8" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.687790 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.690885 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.924246 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.939623 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 29 16:30:47 crc kubenswrapper[4685]: I0129 16:30:47.954687 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.008859 4685 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.010456 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.050059 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.085823 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.163015 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.216952 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.219040 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.273338 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.391639 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.442733 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.508878 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.549437 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.563923 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.636087 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.652102 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.683817 4685 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.685080 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=41.685059398 podStartE2EDuration="41.685059398s" podCreationTimestamp="2026-01-29 16:30:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:30:27.064526737 +0000 UTC m=+254.261883029" watchObservedRunningTime="2026-01-29 16:30:48.685059398 +0000 UTC m=+275.882415680" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.688909 4685 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.688971 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.693330 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.693824 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.706940 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.706925023 podStartE2EDuration="21.706925023s" podCreationTimestamp="2026-01-29 16:30:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:30:48.705911772 +0000 UTC m=+275.903268034" watchObservedRunningTime="2026-01-29 16:30:48.706925023 +0000 UTC m=+275.904281285" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.728110 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.768671 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.774481 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 29 16:30:48 crc kubenswrapper[4685]: I0129 16:30:48.918918 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:48.924681 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:48.938658 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:48.951057 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:48.969379 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:48.971835 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.023688 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.045346 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.046114 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 
16:30:49.050995 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.089980 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.104284 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.211891 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.223859 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.252926 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.283568 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.298141 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.485905 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.586831 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 29 16:30:49 crc kubenswrapper[4685]: E0129 16:30:49.602753 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sjjwk" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" Jan 29 16:30:49 crc kubenswrapper[4685]: E0129 16:30:49.602751 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ksvjv" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.709950 4685 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.711481 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f" gracePeriod=5 Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.755873 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.777029 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" 
Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.802445 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.817689 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 29 16:30:49 crc kubenswrapper[4685]: I0129 16:30:49.855229 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.003218 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.153034 4685 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.232649 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.247187 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.393807 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.413502 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.473782 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.634884 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.644569 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.839566 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.946001 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 29 16:30:50 crc kubenswrapper[4685]: I0129 16:30:50.960223 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.048491 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.062115 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.071968 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 
16:30:51.240024 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.424853 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.447801 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.497153 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.504814 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.630272 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.735189 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.804958 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.846998 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.895778 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.916829 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 29 16:30:51 crc kubenswrapper[4685]: I0129 16:30:51.975462 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.024382 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.033773 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.046001 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.068486 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.106350 4685 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.176871 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.330549 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 
29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.355192 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.543581 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.635196 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.637151 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.641016 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.720530 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.729114 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.741817 4685 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.752716 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.823481 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k8cws"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.831076 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pk7q7"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.851252 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7x5cm"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.857694 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8h7mh"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.863161 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rzswn"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.863581 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerName="marketplace-operator" containerID="cri-o://423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03" gracePeriod=30 Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.872704 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7nt8"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.879876 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ksvjv"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.888777 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9wff8"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.897739 4685 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sjjwk"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.908918 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8blbd"] Jan 29 16:30:52 crc kubenswrapper[4685]: E0129 16:30:52.909132 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.909144 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 29 16:30:52 crc kubenswrapper[4685]: E0129 16:30:52.909153 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" containerName="installer" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.909161 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" containerName="installer" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.909280 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b2572ad-c614-42ee-88c2-6e8670ba17f8" containerName="installer" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.909290 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.909667 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.919729 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8blbd"] Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.984674 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9drz\" (UniqueName: \"kubernetes.io/projected/fa8250b5-b775-4d1a-b7ee-659aab2dc188-kube-api-access-z9drz\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.984739 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:52 crc kubenswrapper[4685]: I0129 16:30:52.984782 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.082858 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.085932 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-z9drz\" (UniqueName: \"kubernetes.io/projected/fa8250b5-b775-4d1a-b7ee-659aab2dc188-kube-api-access-z9drz\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.085992 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.086021 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.092647 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.093227 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.101269 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9drz\" (UniqueName: \"kubernetes.io/projected/fa8250b5-b775-4d1a-b7ee-659aab2dc188-kube-api-access-z9drz\") pod \"marketplace-operator-79b997595-8blbd\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.209747 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.241660 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.289570 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-utilities\") pod \"e14e2172-5fc7-4b65-90fe-4060e571eee5\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.289644 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjpfl\" (UniqueName: \"kubernetes.io/projected/e14e2172-5fc7-4b65-90fe-4060e571eee5-kube-api-access-sjpfl\") pod \"e14e2172-5fc7-4b65-90fe-4060e571eee5\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.289690 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-catalog-content\") pod \"e14e2172-5fc7-4b65-90fe-4060e571eee5\" (UID: \"e14e2172-5fc7-4b65-90fe-4060e571eee5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.292428 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e14e2172-5fc7-4b65-90fe-4060e571eee5" (UID: "e14e2172-5fc7-4b65-90fe-4060e571eee5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.293899 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-utilities" (OuterVolumeSpecName: "utilities") pod "e14e2172-5fc7-4b65-90fe-4060e571eee5" (UID: "e14e2172-5fc7-4b65-90fe-4060e571eee5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.297902 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e14e2172-5fc7-4b65-90fe-4060e571eee5-kube-api-access-sjpfl" (OuterVolumeSpecName: "kube-api-access-sjpfl") pod "e14e2172-5fc7-4b65-90fe-4060e571eee5" (UID: "e14e2172-5fc7-4b65-90fe-4060e571eee5"). InnerVolumeSpecName "kube-api-access-sjpfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.323655 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.362657 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.369428 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.376757 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.391577 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.391609 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjpfl\" (UniqueName: \"kubernetes.io/projected/e14e2172-5fc7-4b65-90fe-4060e571eee5-kube-api-access-sjpfl\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.391620 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e14e2172-5fc7-4b65-90fe-4060e571eee5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.401128 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.408356 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.421208 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.429686 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.492253 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-utilities\") pod \"c9eff178-9b35-42e8-a94d-ecb975269482\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.492318 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl2j7\" (UniqueName: \"kubernetes.io/projected/907873d4-fae4-4bc1-b7ff-a56b329f47a8-kube-api-access-nl2j7\") pod \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.492365 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljn4n\" (UniqueName: \"kubernetes.io/projected/c9eff178-9b35-42e8-a94d-ecb975269482-kube-api-access-ljn4n\") pod \"c9eff178-9b35-42e8-a94d-ecb975269482\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.492437 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-catalog-content\") pod \"13f56e43-a938-4682-9b5a-14fe7af129e5\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.492484 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxbbn\" (UniqueName: \"kubernetes.io/projected/48e58904-04a7-4e69-91a4-61118022ec0b-kube-api-access-zxbbn\") pod \"48e58904-04a7-4e69-91a4-61118022ec0b\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494313 4685 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkfdq\" (UniqueName: \"kubernetes.io/projected/955e6277-1f7d-4d59-8987-3618ed745fc5-kube-api-access-wkfdq\") pod \"955e6277-1f7d-4d59-8987-3618ed745fc5\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494371 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-utilities\") pod \"13f56e43-a938-4682-9b5a-14fe7af129e5\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494434 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-utilities\") pod \"955e6277-1f7d-4d59-8987-3618ed745fc5\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494468 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-utilities\") pod \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494515 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-catalog-content\") pod \"48e58904-04a7-4e69-91a4-61118022ec0b\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494544 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg486\" (UniqueName: \"kubernetes.io/projected/13f56e43-a938-4682-9b5a-14fe7af129e5-kube-api-access-vg486\") pod \"13f56e43-a938-4682-9b5a-14fe7af129e5\" (UID: \"13f56e43-a938-4682-9b5a-14fe7af129e5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494574 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-catalog-content\") pod \"955e6277-1f7d-4d59-8987-3618ed745fc5\" (UID: \"955e6277-1f7d-4d59-8987-3618ed745fc5\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494630 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-utilities\") pod \"48e58904-04a7-4e69-91a4-61118022ec0b\" (UID: \"48e58904-04a7-4e69-91a4-61118022ec0b\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494656 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-catalog-content\") pod \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\" (UID: \"907873d4-fae4-4bc1-b7ff-a56b329f47a8\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.494695 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-catalog-content\") pod \"c9eff178-9b35-42e8-a94d-ecb975269482\" (UID: \"c9eff178-9b35-42e8-a94d-ecb975269482\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.492862 4685 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "13f56e43-a938-4682-9b5a-14fe7af129e5" (UID: "13f56e43-a938-4682-9b5a-14fe7af129e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.493884 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-utilities" (OuterVolumeSpecName: "utilities") pod "c9eff178-9b35-42e8-a94d-ecb975269482" (UID: "c9eff178-9b35-42e8-a94d-ecb975269482"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.495469 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c9eff178-9b35-42e8-a94d-ecb975269482" (UID: "c9eff178-9b35-42e8-a94d-ecb975269482"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.496446 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48e58904-04a7-4e69-91a4-61118022ec0b" (UID: "48e58904-04a7-4e69-91a4-61118022ec0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.496825 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9eff178-9b35-42e8-a94d-ecb975269482-kube-api-access-ljn4n" (OuterVolumeSpecName: "kube-api-access-ljn4n") pod "c9eff178-9b35-42e8-a94d-ecb975269482" (UID: "c9eff178-9b35-42e8-a94d-ecb975269482"). InnerVolumeSpecName "kube-api-access-ljn4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.497002 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "955e6277-1f7d-4d59-8987-3618ed745fc5" (UID: "955e6277-1f7d-4d59-8987-3618ed745fc5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.497024 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-utilities" (OuterVolumeSpecName: "utilities") pod "907873d4-fae4-4bc1-b7ff-a56b329f47a8" (UID: "907873d4-fae4-4bc1-b7ff-a56b329f47a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.497339 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-utilities" (OuterVolumeSpecName: "utilities") pod "955e6277-1f7d-4d59-8987-3618ed745fc5" (UID: "955e6277-1f7d-4d59-8987-3618ed745fc5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.497520 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-utilities" (OuterVolumeSpecName: "utilities") pod "13f56e43-a938-4682-9b5a-14fe7af129e5" (UID: "13f56e43-a938-4682-9b5a-14fe7af129e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.497663 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-utilities" (OuterVolumeSpecName: "utilities") pod "48e58904-04a7-4e69-91a4-61118022ec0b" (UID: "48e58904-04a7-4e69-91a4-61118022ec0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.497775 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "907873d4-fae4-4bc1-b7ff-a56b329f47a8" (UID: "907873d4-fae4-4bc1-b7ff-a56b329f47a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.499048 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48e58904-04a7-4e69-91a4-61118022ec0b-kube-api-access-zxbbn" (OuterVolumeSpecName: "kube-api-access-zxbbn") pod "48e58904-04a7-4e69-91a4-61118022ec0b" (UID: "48e58904-04a7-4e69-91a4-61118022ec0b"). InnerVolumeSpecName "kube-api-access-zxbbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.500022 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/955e6277-1f7d-4d59-8987-3618ed745fc5-kube-api-access-wkfdq" (OuterVolumeSpecName: "kube-api-access-wkfdq") pod "955e6277-1f7d-4d59-8987-3618ed745fc5" (UID: "955e6277-1f7d-4d59-8987-3618ed745fc5"). InnerVolumeSpecName "kube-api-access-wkfdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.500039 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/907873d4-fae4-4bc1-b7ff-a56b329f47a8-kube-api-access-nl2j7" (OuterVolumeSpecName: "kube-api-access-nl2j7") pod "907873d4-fae4-4bc1-b7ff-a56b329f47a8" (UID: "907873d4-fae4-4bc1-b7ff-a56b329f47a8"). InnerVolumeSpecName "kube-api-access-nl2j7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.500064 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13f56e43-a938-4682-9b5a-14fe7af129e5-kube-api-access-vg486" (OuterVolumeSpecName: "kube-api-access-vg486") pod "13f56e43-a938-4682-9b5a-14fe7af129e5" (UID: "13f56e43-a938-4682-9b5a-14fe7af129e5"). InnerVolumeSpecName "kube-api-access-vg486". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.536495 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.547836 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.587955 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjjwk" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.588493 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjjwk" event={"ID":"bcc954ee-61b9-492c-b375-2f271c18201c","Type":"ContainerDied","Data":"f0d04f478f2c70eb3a821485d87e70c20c99858634e94c926dc4b3884c264bc6"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.588549 4685 scope.go:117] "RemoveContainer" containerID="dea5c6023c049f1f5d1c6844e3a557f11cc53ba23cf00828b09dad8ec3295077" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.591444 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pk7q7" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.591468 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pk7q7" event={"ID":"e14e2172-5fc7-4b65-90fe-4060e571eee5","Type":"ContainerDied","Data":"af7738b393cbdbde2984b9864ed267a23707653e69bc97edbacb45a389d57b6b"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.595045 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7nt8" event={"ID":"907873d4-fae4-4bc1-b7ff-a56b329f47a8","Type":"ContainerDied","Data":"a5bf33f00fe6d3b9ecacf3cff6185d6768cc04fde0f0f99fe0b135d3a00e19cd"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.595109 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7nt8" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.599848 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ztg7\" (UniqueName: \"kubernetes.io/projected/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-kube-api-access-2ztg7\") pod \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.599960 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-catalog-content\") pod \"bcc954ee-61b9-492c-b375-2f271c18201c\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.600001 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-utilities\") pod \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\" (UID: \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.600035 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-utilities\") pod \"bcc954ee-61b9-492c-b375-2f271c18201c\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.600070 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-catalog-content\") pod \"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\" (UID: 
\"b1edc1c3-3a71-4117-bf81-b3e95b8d2230\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.600106 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cm2t8\" (UniqueName: \"kubernetes.io/projected/bcc954ee-61b9-492c-b375-2f271c18201c-kube-api-access-cm2t8\") pod \"bcc954ee-61b9-492c-b375-2f271c18201c\" (UID: \"bcc954ee-61b9-492c-b375-2f271c18201c\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.600309 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bcc954ee-61b9-492c-b375-2f271c18201c" (UID: "bcc954ee-61b9-492c-b375-2f271c18201c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.600469 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1edc1c3-3a71-4117-bf81-b3e95b8d2230" (UID: "b1edc1c3-3a71-4117-bf81-b3e95b8d2230"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.600887 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-utilities" (OuterVolumeSpecName: "utilities") pod "bcc954ee-61b9-492c-b375-2f271c18201c" (UID: "bcc954ee-61b9-492c-b375-2f271c18201c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601371 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601459 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg486\" (UniqueName: \"kubernetes.io/projected/13f56e43-a938-4682-9b5a-14fe7af129e5-kube-api-access-vg486\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601471 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601481 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48e58904-04a7-4e69-91a4-61118022ec0b-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601491 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601503 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601548 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601559 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9eff178-9b35-42e8-a94d-ecb975269482-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601570 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl2j7\" (UniqueName: \"kubernetes.io/projected/907873d4-fae4-4bc1-b7ff-a56b329f47a8-kube-api-access-nl2j7\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601580 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcc954ee-61b9-492c-b375-2f271c18201c-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601588 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljn4n\" (UniqueName: \"kubernetes.io/projected/c9eff178-9b35-42e8-a94d-ecb975269482-kube-api-access-ljn4n\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601597 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601607 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601616 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxbbn\" (UniqueName: \"kubernetes.io/projected/48e58904-04a7-4e69-91a4-61118022ec0b-kube-api-access-zxbbn\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601624 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkfdq\" (UniqueName: \"kubernetes.io/projected/955e6277-1f7d-4d59-8987-3618ed745fc5-kube-api-access-wkfdq\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601632 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13f56e43-a938-4682-9b5a-14fe7af129e5-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601641 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/955e6277-1f7d-4d59-8987-3618ed745fc5-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601649 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907873d4-fae4-4bc1-b7ff-a56b329f47a8-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.601661 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-utilities" (OuterVolumeSpecName: "utilities") pod "b1edc1c3-3a71-4117-bf81-b3e95b8d2230" (UID: "b1edc1c3-3a71-4117-bf81-b3e95b8d2230"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.603721 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc954ee-61b9-492c-b375-2f271c18201c-kube-api-access-cm2t8" (OuterVolumeSpecName: "kube-api-access-cm2t8") pod "bcc954ee-61b9-492c-b375-2f271c18201c" (UID: "bcc954ee-61b9-492c-b375-2f271c18201c"). InnerVolumeSpecName "kube-api-access-cm2t8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.604726 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-kube-api-access-2ztg7" (OuterVolumeSpecName: "kube-api-access-2ztg7") pod "b1edc1c3-3a71-4117-bf81-b3e95b8d2230" (UID: "b1edc1c3-3a71-4117-bf81-b3e95b8d2230"). InnerVolumeSpecName "kube-api-access-2ztg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.607918 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9wff8" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.617733 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9wff8" event={"ID":"c9eff178-9b35-42e8-a94d-ecb975269482","Type":"ContainerDied","Data":"350e005d6c7e397d23aefb939c8ad596f684859c724bef83435aacc0b6be6f01"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.626029 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k8cws" event={"ID":"13f56e43-a938-4682-9b5a-14fe7af129e5","Type":"ContainerDied","Data":"a6a63eb72f4b9c4d9699034de7344a615080163c3ae087337a966ae03d332b26"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.626269 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k8cws" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.626739 4685 scope.go:117] "RemoveContainer" containerID="d88631ecd1bcacf8b695151181838f76e7ebf03c85636dd05d25df8c50ac1bf8" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.632497 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ksvjv" event={"ID":"b1edc1c3-3a71-4117-bf81-b3e95b8d2230","Type":"ContainerDied","Data":"250a1f204f818509a329d6e7842ef2c17a3f85a55d7d1357b4bad70d09fb86fa"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.632583 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ksvjv" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.645616 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7nt8"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.648661 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7nt8"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.652505 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h7mh" event={"ID":"955e6277-1f7d-4d59-8987-3618ed745fc5","Type":"ContainerDied","Data":"a26079c8b8eb59c9af480fab196ead843d2b2f82190ab1c48c2ce1c7d5f28737"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.652618 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8h7mh" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.660676 4685 generic.go:334] "Generic (PLEG): container finished" podID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerID="423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03" exitCode=0 Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.660719 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.660767 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" event={"ID":"092013a3-0a24-4a94-ba85-8fe88c4c50d4","Type":"ContainerDied","Data":"423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.660841 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rzswn" event={"ID":"092013a3-0a24-4a94-ba85-8fe88c4c50d4","Type":"ContainerDied","Data":"186726ee422b1eaac50cb587951a75d3cd5c61a3346642dc4ec7a8bc8f3873ba"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.662940 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x5cm" event={"ID":"48e58904-04a7-4e69-91a4-61118022ec0b","Type":"ContainerDied","Data":"9c829c04d04c27933b8d5b702b77a17e77c7eb516678f83b43b6c3e53b33ee3a"} Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.663007 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x5cm" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.667976 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pk7q7"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.674784 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pk7q7"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.702583 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-trusted-ca\") pod \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.702699 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mnqp\" (UniqueName: \"kubernetes.io/projected/092013a3-0a24-4a94-ba85-8fe88c4c50d4-kube-api-access-2mnqp\") pod \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.702776 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-operator-metrics\") pod \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\" (UID: \"092013a3-0a24-4a94-ba85-8fe88c4c50d4\") " Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.703160 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cm2t8\" (UniqueName: \"kubernetes.io/projected/bcc954ee-61b9-492c-b375-2f271c18201c-kube-api-access-cm2t8\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.703183 4685 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ztg7\" (UniqueName: \"kubernetes.io/projected/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-kube-api-access-2ztg7\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.703196 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1edc1c3-3a71-4117-bf81-b3e95b8d2230-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.704709 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "092013a3-0a24-4a94-ba85-8fe88c4c50d4" (UID: "092013a3-0a24-4a94-ba85-8fe88c4c50d4"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.707102 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/092013a3-0a24-4a94-ba85-8fe88c4c50d4-kube-api-access-2mnqp" (OuterVolumeSpecName: "kube-api-access-2mnqp") pod "092013a3-0a24-4a94-ba85-8fe88c4c50d4" (UID: "092013a3-0a24-4a94-ba85-8fe88c4c50d4"). InnerVolumeSpecName "kube-api-access-2mnqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.708150 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "092013a3-0a24-4a94-ba85-8fe88c4c50d4" (UID: "092013a3-0a24-4a94-ba85-8fe88c4c50d4"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.729265 4685 scope.go:117] "RemoveContainer" containerID="cae95ba12a90aeec9abc829c8c9e09fcebe5b6081c5169bab39995801b71299a" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.740662 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9wff8"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.745008 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9wff8"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.772079 4685 scope.go:117] "RemoveContainer" containerID="75ab0e93a012173aa2efb8fcae02e95fbf25f13ebbc7df3187a9706fad8bc054" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.786319 4685 scope.go:117] "RemoveContainer" containerID="64c771286458a3fa211c10ce555e549d2189c2e625876360788f9251509191d2" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.803956 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mnqp\" (UniqueName: \"kubernetes.io/projected/092013a3-0a24-4a94-ba85-8fe88c4c50d4-kube-api-access-2mnqp\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.803996 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.804010 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/092013a3-0a24-4a94-ba85-8fe88c4c50d4-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.806011 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k8cws"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.811457 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k8cws"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.815890 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8blbd"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.819456 4685 scope.go:117] "RemoveContainer" containerID="978432f84e5e6d28be99ef61d536090b8e94fecd4d2493d9727b9462bab2a5c7" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.831797 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ksvjv"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.835028 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ksvjv"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.851267 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8h7mh"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.858504 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8h7mh"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.859137 4685 scope.go:117] "RemoveContainer" containerID="79f472e380b2c61b0cbeb9474c602b9e0b68d5179b216d5497dfa8e3c2cad730" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.863449 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7x5cm"] Jan 29 
16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.866150 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7x5cm"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.885180 4685 scope.go:117] "RemoveContainer" containerID="423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.908850 4685 scope.go:117] "RemoveContainer" containerID="423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03" Jan 29 16:30:53 crc kubenswrapper[4685]: E0129 16:30:53.909298 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03\": container with ID starting with 423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03 not found: ID does not exist" containerID="423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.909418 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03"} err="failed to get container status \"423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03\": rpc error: code = NotFound desc = could not find container \"423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03\": container with ID starting with 423d886f665a228cba306b2668ee7f41adf58b670a3024fe937ce521d059ce03 not found: ID does not exist" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.909515 4685 scope.go:117] "RemoveContainer" containerID="b9cc69678fef537eb8c11fb60556f9a43b340e59e0cfe000bbda94391080b569" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.927036 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sjjwk"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.932559 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sjjwk"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.980185 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.993441 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rzswn"] Jan 29 16:30:53 crc kubenswrapper[4685]: I0129 16:30:53.998240 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rzswn"] Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.331515 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.592050 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.678317 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" event={"ID":"fa8250b5-b775-4d1a-b7ee-659aab2dc188","Type":"ContainerStarted","Data":"120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8"} Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.678380 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" 
event={"ID":"fa8250b5-b775-4d1a-b7ee-659aab2dc188","Type":"ContainerStarted","Data":"c2cd8d5ddcb2c267684e01f70a11d1c35e7555a9fa229df38f5448eddc83bb97"} Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.679053 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.683083 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.763132 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" podStartSLOduration=2.76310761 podStartE2EDuration="2.76310761s" podCreationTimestamp="2026-01-29 16:30:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:30:54.705775128 +0000 UTC m=+281.903131440" watchObservedRunningTime="2026-01-29 16:30:54.76310761 +0000 UTC m=+281.960463902" Jan 29 16:30:54 crc kubenswrapper[4685]: I0129 16:30:54.765795 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.189243 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.328980 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.329049 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.422878 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.422975 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.422993 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423103 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423175 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423256 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423337 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423371 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423476 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423794 4685 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.423921 4685 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.424000 4685 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.424026 4685 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.430665 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.525366 4685 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.607990 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" path="/var/lib/kubelet/pods/092013a3-0a24-4a94-ba85-8fe88c4c50d4/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.609130 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" path="/var/lib/kubelet/pods/13f56e43-a938-4682-9b5a-14fe7af129e5/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.609982 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" path="/var/lib/kubelet/pods/48e58904-04a7-4e69-91a4-61118022ec0b/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.611289 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" path="/var/lib/kubelet/pods/907873d4-fae4-4bc1-b7ff-a56b329f47a8/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.611946 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" path="/var/lib/kubelet/pods/955e6277-1f7d-4d59-8987-3618ed745fc5/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.612597 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" path="/var/lib/kubelet/pods/b1edc1c3-3a71-4117-bf81-b3e95b8d2230/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.613236 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" path="/var/lib/kubelet/pods/bcc954ee-61b9-492c-b375-2f271c18201c/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.614577 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" path="/var/lib/kubelet/pods/c9eff178-9b35-42e8-a94d-ecb975269482/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.615290 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" path="/var/lib/kubelet/pods/e14e2172-5fc7-4b65-90fe-4060e571eee5/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.615860 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.616182 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.628327 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.628361 4685 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="81b3ff36-6958-43a3-b384-ec7a97318f42" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.631792 4685 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.631816 4685 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="81b3ff36-6958-43a3-b384-ec7a97318f42" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.691432 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.691728 4685 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f" exitCode=137 Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.691850 4685 scope.go:117] "RemoveContainer" containerID="da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.691795 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.708957 4685 scope.go:117] "RemoveContainer" containerID="da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f" Jan 29 16:30:55 crc kubenswrapper[4685]: E0129 16:30:55.709535 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f\": container with ID starting with da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f not found: ID does not exist" containerID="da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f" Jan 29 16:30:55 crc kubenswrapper[4685]: I0129 16:30:55.709580 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f"} err="failed to get container status \"da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f\": rpc error: code = NotFound desc = could not find container \"da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f\": container with ID starting with da773f896cfb70f4ad1fad6dfdf06c25eb41658c6eef251ec27fde520096213f not found: ID does not exist" Jan 29 16:30:57 crc kubenswrapper[4685]: I0129 16:30:57.190841 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 29 16:31:13 crc kubenswrapper[4685]: I0129 16:31:13.364633 4685 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.331090 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.331428 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430148 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-88lp6"] Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430349 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerName="marketplace-operator" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430359 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerName="marketplace-operator" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430369 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430374 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430383 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430407 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430437 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430443 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430453 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430459 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430468 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430473 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430482 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430488 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430495 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430500 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: E0129 16:31:34.430509 4685 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430514 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430592 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="907873d4-fae4-4bc1-b7ff-a56b329f47a8" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430602 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="092013a3-0a24-4a94-ba85-8fe88c4c50d4" containerName="marketplace-operator" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430610 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e14e2172-5fc7-4b65-90fe-4060e571eee5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430618 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="955e6277-1f7d-4d59-8987-3618ed745fc5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430625 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc954ee-61b9-492c-b375-2f271c18201c" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430635 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9eff178-9b35-42e8-a94d-ecb975269482" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430643 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1edc1c3-3a71-4117-bf81-b3e95b8d2230" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430651 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="13f56e43-a938-4682-9b5a-14fe7af129e5" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430657 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="48e58904-04a7-4e69-91a4-61118022ec0b" containerName="extract-utilities" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.430983 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.448275 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-88lp6"] Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520246 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520327 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-registry-tls\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520373 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3217c1c-97fa-4491-9016-402dbc5c347b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520418 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2q64\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-kube-api-access-c2q64\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520441 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-bound-sa-token\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520470 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3217c1c-97fa-4491-9016-402dbc5c347b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520499 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3217c1c-97fa-4491-9016-402dbc5c347b-registry-certificates\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.520545 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/d3217c1c-97fa-4491-9016-402dbc5c347b-trusted-ca\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.538919 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.621453 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-registry-tls\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.621792 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3217c1c-97fa-4491-9016-402dbc5c347b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.621817 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2q64\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-kube-api-access-c2q64\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.621834 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-bound-sa-token\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.621859 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3217c1c-97fa-4491-9016-402dbc5c347b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.621883 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3217c1c-97fa-4491-9016-402dbc5c347b-registry-certificates\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.621915 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3217c1c-97fa-4491-9016-402dbc5c347b-trusted-ca\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.622897 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3217c1c-97fa-4491-9016-402dbc5c347b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.623065 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3217c1c-97fa-4491-9016-402dbc5c347b-trusted-ca\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.623332 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3217c1c-97fa-4491-9016-402dbc5c347b-registry-certificates\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.628011 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3217c1c-97fa-4491-9016-402dbc5c347b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.636264 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-registry-tls\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.644762 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-bound-sa-token\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.644864 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2q64\" (UniqueName: \"kubernetes.io/projected/d3217c1c-97fa-4491-9016-402dbc5c347b-kube-api-access-c2q64\") pod \"image-registry-66df7c8f76-88lp6\" (UID: \"d3217c1c-97fa-4491-9016-402dbc5c347b\") " pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.746735 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:34 crc kubenswrapper[4685]: I0129 16:31:34.976242 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-88lp6"] Jan 29 16:31:34 crc kubenswrapper[4685]: W0129 16:31:34.990627 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3217c1c_97fa_4491_9016_402dbc5c347b.slice/crio-83d5c9f8524e40436f6436ff0bee10e5fd8e28c39a70d908f0d2da8919f94d96 WatchSource:0}: Error finding container 83d5c9f8524e40436f6436ff0bee10e5fd8e28c39a70d908f0d2da8919f94d96: Status 404 returned error can't find the container with id 83d5c9f8524e40436f6436ff0bee10e5fd8e28c39a70d908f0d2da8919f94d96 Jan 29 16:31:35 crc kubenswrapper[4685]: I0129 16:31:35.902005 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" event={"ID":"d3217c1c-97fa-4491-9016-402dbc5c347b","Type":"ContainerStarted","Data":"bc957915f6930e5b26765a0cd681f2041f50332a2e8783bbd65fc1cf045cc460"} Jan 29 16:31:35 crc kubenswrapper[4685]: I0129 16:31:35.902493 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" event={"ID":"d3217c1c-97fa-4491-9016-402dbc5c347b","Type":"ContainerStarted","Data":"83d5c9f8524e40436f6436ff0bee10e5fd8e28c39a70d908f0d2da8919f94d96"} Jan 29 16:31:35 crc kubenswrapper[4685]: I0129 16:31:35.902601 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:35 crc kubenswrapper[4685]: I0129 16:31:35.930922 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" podStartSLOduration=1.930898529 podStartE2EDuration="1.930898529s" podCreationTimestamp="2026-01-29 16:31:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:31:35.925423173 +0000 UTC m=+323.122779475" watchObservedRunningTime="2026-01-29 16:31:35.930898529 +0000 UTC m=+323.128254801" Jan 29 16:31:54 crc kubenswrapper[4685]: I0129 16:31:54.756005 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-88lp6" Jan 29 16:31:54 crc kubenswrapper[4685]: I0129 16:31:54.876490 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbgv8"] Jan 29 16:32:04 crc kubenswrapper[4685]: I0129 16:32:04.331006 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:32:04 crc kubenswrapper[4685]: I0129 16:32:04.331308 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.205023 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cxvdf"] 
Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.207731 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.210550 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.216384 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxvdf"] Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.383245 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-catalog-content\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.383350 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-utilities\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.383406 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94hc8\" (UniqueName: \"kubernetes.io/projected/37a54549-4f2f-4b92-a1b1-99d418b04e93-kube-api-access-94hc8\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.409048 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mx7ws"] Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.410167 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.413711 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.417807 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mx7ws"] Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.485060 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94hc8\" (UniqueName: \"kubernetes.io/projected/37a54549-4f2f-4b92-a1b1-99d418b04e93-kube-api-access-94hc8\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.485193 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-catalog-content\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.485296 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-utilities\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.486228 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-catalog-content\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.486338 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-utilities\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.517528 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94hc8\" (UniqueName: \"kubernetes.io/projected/37a54549-4f2f-4b92-a1b1-99d418b04e93-kube-api-access-94hc8\") pod \"certified-operators-cxvdf\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.522833 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.586793 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-catalog-content\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.586883 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsvl9\" (UniqueName: \"kubernetes.io/projected/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-kube-api-access-jsvl9\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.586936 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-utilities\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.687845 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-catalog-content\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.688096 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsvl9\" (UniqueName: \"kubernetes.io/projected/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-kube-api-access-jsvl9\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.688153 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-utilities\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.688978 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-utilities\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.689495 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-catalog-content\") pod \"redhat-operators-mx7ws\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.723610 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsvl9\" (UniqueName: \"kubernetes.io/projected/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-kube-api-access-jsvl9\") pod \"redhat-operators-mx7ws\" (UID: 
\"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:18 crc kubenswrapper[4685]: I0129 16:32:18.779516 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxvdf"] Jan 29 16:32:19 crc kubenswrapper[4685]: I0129 16:32:19.023357 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:32:19 crc kubenswrapper[4685]: I0129 16:32:19.164371 4685 generic.go:334] "Generic (PLEG): container finished" podID="37a54549-4f2f-4b92-a1b1-99d418b04e93" containerID="14a921dcd1313c218b53624b234a9aa82ec8adfffd0d01001dcb00413442d45a" exitCode=0 Jan 29 16:32:19 crc kubenswrapper[4685]: I0129 16:32:19.164469 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxvdf" event={"ID":"37a54549-4f2f-4b92-a1b1-99d418b04e93","Type":"ContainerDied","Data":"14a921dcd1313c218b53624b234a9aa82ec8adfffd0d01001dcb00413442d45a"} Jan 29 16:32:19 crc kubenswrapper[4685]: I0129 16:32:19.164502 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxvdf" event={"ID":"37a54549-4f2f-4b92-a1b1-99d418b04e93","Type":"ContainerStarted","Data":"66ff64e001d6e6e811420d883c3fdaf0b201784c37eb8a4c8d5ca3ecc86fe517"} Jan 29 16:32:19 crc kubenswrapper[4685]: I0129 16:32:19.243135 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mx7ws"] Jan 29 16:32:19 crc kubenswrapper[4685]: W0129 16:32:19.248356 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66f909c7_70dc_44c8_8fc3_22cddf5f9f76.slice/crio-6858bb0485f11d94a77d636355f15c12ccbf1aa1fd3a99f581c7a5a1b705bff1 WatchSource:0}: Error finding container 6858bb0485f11d94a77d636355f15c12ccbf1aa1fd3a99f581c7a5a1b705bff1: Status 404 returned error can't find the container with id 6858bb0485f11d94a77d636355f15c12ccbf1aa1fd3a99f581c7a5a1b705bff1 Jan 29 16:32:19 crc kubenswrapper[4685]: E0129 16:32:19.303093 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:32:19 crc kubenswrapper[4685]: E0129 16:32:19.303214 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-94hc8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-cxvdf_openshift-marketplace(37a54549-4f2f-4b92-a1b1-99d418b04e93): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:19 crc kubenswrapper[4685]: E0129 16:32:19.304479 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:32:19 crc kubenswrapper[4685]: I0129 16:32:19.917743 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" podUID="14cfc19d-44fb-45de-9abc-df6d268d7cb2" containerName="registry" containerID="cri-o://93e19e1ec631d37ad1f2cd75043991dfe2bfc5c93bcaf5da432272bebe61873f" gracePeriod=30 Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.185741 4685 generic.go:334] "Generic (PLEG): container finished" podID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerID="be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36" exitCode=0 Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.185842 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx7ws" event={"ID":"66f909c7-70dc-44c8-8fc3-22cddf5f9f76","Type":"ContainerDied","Data":"be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36"} Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.185911 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx7ws" event={"ID":"66f909c7-70dc-44c8-8fc3-22cddf5f9f76","Type":"ContainerStarted","Data":"6858bb0485f11d94a77d636355f15c12ccbf1aa1fd3a99f581c7a5a1b705bff1"} Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.188041 4685 generic.go:334] "Generic (PLEG): container finished" podID="14cfc19d-44fb-45de-9abc-df6d268d7cb2" 
containerID="93e19e1ec631d37ad1f2cd75043991dfe2bfc5c93bcaf5da432272bebe61873f" exitCode=0 Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.188121 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" event={"ID":"14cfc19d-44fb-45de-9abc-df6d268d7cb2","Type":"ContainerDied","Data":"93e19e1ec631d37ad1f2cd75043991dfe2bfc5c93bcaf5da432272bebe61873f"} Jan 29 16:32:20 crc kubenswrapper[4685]: E0129 16:32:20.190121 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.300813 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:32:20 crc kubenswrapper[4685]: E0129 16:32:20.326354 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:32:20 crc kubenswrapper[4685]: E0129 16:32:20.326532 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jsvl9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mx7ws_openshift-marketplace(66f909c7-70dc-44c8-8fc3-22cddf5f9f76): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:20 crc kubenswrapper[4685]: E0129 16:32:20.327947 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with 
ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412223 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-bound-sa-token\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412472 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412510 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-tls\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412563 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-trusted-ca\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412604 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/14cfc19d-44fb-45de-9abc-df6d268d7cb2-installation-pull-secrets\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412640 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wdmw\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-kube-api-access-2wdmw\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412674 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-certificates\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.412858 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/14cfc19d-44fb-45de-9abc-df6d268d7cb2-ca-trust-extracted\") pod \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\" (UID: \"14cfc19d-44fb-45de-9abc-df6d268d7cb2\") " Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.414292 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.414382 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.419529 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.419554 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14cfc19d-44fb-45de-9abc-df6d268d7cb2-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.420469 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.423827 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-kube-api-access-2wdmw" (OuterVolumeSpecName: "kube-api-access-2wdmw") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "kube-api-access-2wdmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.431075 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.431194 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14cfc19d-44fb-45de-9abc-df6d268d7cb2-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "14cfc19d-44fb-45de-9abc-df6d268d7cb2" (UID: "14cfc19d-44fb-45de-9abc-df6d268d7cb2"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.515452 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.515489 4685 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/14cfc19d-44fb-45de-9abc-df6d268d7cb2-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.515505 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wdmw\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-kube-api-access-2wdmw\") on node \"crc\" DevicePath \"\"" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.515515 4685 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.515527 4685 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/14cfc19d-44fb-45de-9abc-df6d268d7cb2-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.515538 4685 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.515549 4685 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/14cfc19d-44fb-45de-9abc-df6d268d7cb2-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.601985 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pwgss"] Jan 29 16:32:20 crc kubenswrapper[4685]: E0129 16:32:20.602618 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14cfc19d-44fb-45de-9abc-df6d268d7cb2" containerName="registry" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.602755 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="14cfc19d-44fb-45de-9abc-df6d268d7cb2" containerName="registry" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.603043 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="14cfc19d-44fb-45de-9abc-df6d268d7cb2" containerName="registry" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.604458 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.606167 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pwgss"] Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.607003 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.718245 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tfh5\" (UniqueName: \"kubernetes.io/projected/0602efa0-2eae-4bfa-a9fb-b12853c13228-kube-api-access-9tfh5\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.718924 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-utilities\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.718970 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-catalog-content\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.802036 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wx9ts"] Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.804467 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.807442 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.815759 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wx9ts"] Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.820852 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tfh5\" (UniqueName: \"kubernetes.io/projected/0602efa0-2eae-4bfa-a9fb-b12853c13228-kube-api-access-9tfh5\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.820932 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-utilities\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.820967 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-catalog-content\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.821431 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-utilities\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.821513 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-catalog-content\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.859543 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tfh5\" (UniqueName: \"kubernetes.io/projected/0602efa0-2eae-4bfa-a9fb-b12853c13228-kube-api-access-9tfh5\") pod \"community-operators-pwgss\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.922250 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6z6j\" (UniqueName: \"kubernetes.io/projected/32247aeb-b5a0-4727-ae80-88aa6b9209f3-kube-api-access-x6z6j\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.922310 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-utilities\") pod \"redhat-marketplace-wx9ts\" (UID: 
\"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.922474 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-catalog-content\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:20 crc kubenswrapper[4685]: I0129 16:32:20.934106 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.027733 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6z6j\" (UniqueName: \"kubernetes.io/projected/32247aeb-b5a0-4727-ae80-88aa6b9209f3-kube-api-access-x6z6j\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.027799 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-utilities\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.027864 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-catalog-content\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.028489 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-catalog-content\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.029108 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-utilities\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.046626 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6z6j\" (UniqueName: \"kubernetes.io/projected/32247aeb-b5a0-4727-ae80-88aa6b9209f3-kube-api-access-x6z6j\") pod \"redhat-marketplace-wx9ts\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.128213 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.139875 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pwgss"] Jan 29 16:32:21 crc kubenswrapper[4685]: W0129 16:32:21.146920 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0602efa0_2eae_4bfa_a9fb_b12853c13228.slice/crio-5173a7929946a8c7705c3a25cf3b96c2060fb61d999ffebde462c8a98fd9dfe5 WatchSource:0}: Error finding container 5173a7929946a8c7705c3a25cf3b96c2060fb61d999ffebde462c8a98fd9dfe5: Status 404 returned error can't find the container with id 5173a7929946a8c7705c3a25cf3b96c2060fb61d999ffebde462c8a98fd9dfe5 Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.193224 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwgss" event={"ID":"0602efa0-2eae-4bfa-a9fb-b12853c13228","Type":"ContainerStarted","Data":"5173a7929946a8c7705c3a25cf3b96c2060fb61d999ffebde462c8a98fd9dfe5"} Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.195049 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" event={"ID":"14cfc19d-44fb-45de-9abc-df6d268d7cb2","Type":"ContainerDied","Data":"3e0dc9c8866a24fcd4c4c546a474258e25598ca4a6feda90fa929bee9dbfccb1"} Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.195093 4685 scope.go:117] "RemoveContainer" containerID="93e19e1ec631d37ad1f2cd75043991dfe2bfc5c93bcaf5da432272bebe61873f" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.195110 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbgv8" Jan 29 16:32:21 crc kubenswrapper[4685]: E0129 16:32:21.196793 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.245066 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbgv8"] Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.249063 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbgv8"] Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.539650 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wx9ts"] Jan 29 16:32:21 crc kubenswrapper[4685]: W0129 16:32:21.547299 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32247aeb_b5a0_4727_ae80_88aa6b9209f3.slice/crio-bedf56e18020aef6465bf621a807dbdb813b889004ea65cb6d4ecee162c0bf8d WatchSource:0}: Error finding container bedf56e18020aef6465bf621a807dbdb813b889004ea65cb6d4ecee162c0bf8d: Status 404 returned error can't find the container with id bedf56e18020aef6465bf621a807dbdb813b889004ea65cb6d4ecee162c0bf8d Jan 29 16:32:21 crc kubenswrapper[4685]: I0129 16:32:21.609327 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14cfc19d-44fb-45de-9abc-df6d268d7cb2" path="/var/lib/kubelet/pods/14cfc19d-44fb-45de-9abc-df6d268d7cb2/volumes" Jan 29 
16:32:22 crc kubenswrapper[4685]: I0129 16:32:22.204540 4685 generic.go:334] "Generic (PLEG): container finished" podID="0602efa0-2eae-4bfa-a9fb-b12853c13228" containerID="dff29a73bfecf1dcc278360b78acb32ec635e0ceb4778df585fc0100634c95dc" exitCode=0 Jan 29 16:32:22 crc kubenswrapper[4685]: I0129 16:32:22.204592 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwgss" event={"ID":"0602efa0-2eae-4bfa-a9fb-b12853c13228","Type":"ContainerDied","Data":"dff29a73bfecf1dcc278360b78acb32ec635e0ceb4778df585fc0100634c95dc"} Jan 29 16:32:22 crc kubenswrapper[4685]: I0129 16:32:22.206647 4685 generic.go:334] "Generic (PLEG): container finished" podID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerID="11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e" exitCode=0 Jan 29 16:32:22 crc kubenswrapper[4685]: I0129 16:32:22.206680 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wx9ts" event={"ID":"32247aeb-b5a0-4727-ae80-88aa6b9209f3","Type":"ContainerDied","Data":"11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e"} Jan 29 16:32:22 crc kubenswrapper[4685]: I0129 16:32:22.206707 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wx9ts" event={"ID":"32247aeb-b5a0-4727-ae80-88aa6b9209f3","Type":"ContainerStarted","Data":"bedf56e18020aef6465bf621a807dbdb813b889004ea65cb6d4ecee162c0bf8d"} Jan 29 16:32:22 crc kubenswrapper[4685]: E0129 16:32:22.327577 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:32:22 crc kubenswrapper[4685]: E0129 16:32:22.328011 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9tfh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
community-operators-pwgss_openshift-marketplace(0602efa0-2eae-4bfa-a9fb-b12853c13228): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:22 crc kubenswrapper[4685]: E0129 16:32:22.329348 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:32:22 crc kubenswrapper[4685]: E0129 16:32:22.335429 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:32:22 crc kubenswrapper[4685]: E0129 16:32:22.335559 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6z6j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wx9ts_openshift-marketplace(32247aeb-b5a0-4727-ae80-88aa6b9209f3): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:22 crc kubenswrapper[4685]: E0129 16:32:22.336749 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-wx9ts" 
podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:32:23 crc kubenswrapper[4685]: E0129 16:32:23.213506 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:32:23 crc kubenswrapper[4685]: E0129 16:32:23.213562 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:32:30 crc kubenswrapper[4685]: E0129 16:32:30.751146 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:32:30 crc kubenswrapper[4685]: E0129 16:32:30.751856 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-94hc8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-cxvdf_openshift-marketplace(37a54549-4f2f-4b92-a1b1-99d418b04e93): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:30 crc kubenswrapper[4685]: E0129 16:32:30.753599 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" 
pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:32:32 crc kubenswrapper[4685]: E0129 16:32:32.737772 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:32:32 crc kubenswrapper[4685]: E0129 16:32:32.737934 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jsvl9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mx7ws_openshift-marketplace(66f909c7-70dc-44c8-8fc3-22cddf5f9f76): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:32 crc kubenswrapper[4685]: E0129 16:32:32.739197 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:32:34 crc kubenswrapper[4685]: I0129 16:32:34.330720 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:32:34 crc kubenswrapper[4685]: I0129 16:32:34.330836 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:32:34 crc kubenswrapper[4685]: I0129 16:32:34.330920 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:32:34 crc kubenswrapper[4685]: I0129 16:32:34.332008 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e7de6ec8d57c9bb74f79c5a4e9fa8e09deaf5359c715811ce05763f4d27b77e6"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:32:34 crc kubenswrapper[4685]: I0129 16:32:34.332156 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://e7de6ec8d57c9bb74f79c5a4e9fa8e09deaf5359c715811ce05763f4d27b77e6" gracePeriod=600 Jan 29 16:32:35 crc kubenswrapper[4685]: I0129 16:32:35.306096 4685 generic.go:334] "Generic (PLEG): container finished" podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="e7de6ec8d57c9bb74f79c5a4e9fa8e09deaf5359c715811ce05763f4d27b77e6" exitCode=0 Jan 29 16:32:35 crc kubenswrapper[4685]: I0129 16:32:35.306233 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"e7de6ec8d57c9bb74f79c5a4e9fa8e09deaf5359c715811ce05763f4d27b77e6"} Jan 29 16:32:35 crc kubenswrapper[4685]: I0129 16:32:35.307183 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"a0419951284be5ffe9ffe438c38e991f2af3be8c6f4e6dded1d7d5d23623be54"} Jan 29 16:32:35 crc kubenswrapper[4685]: I0129 16:32:35.307227 4685 scope.go:117] "RemoveContainer" containerID="ea34af4198b3048810a28965cde9c98f9f582d0fbb77c7119b50dff707ba28de" Jan 29 16:32:37 crc kubenswrapper[4685]: E0129 16:32:37.732519 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:32:37 crc kubenswrapper[4685]: E0129 16:32:37.732855 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9tfh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pwgss_openshift-marketplace(0602efa0-2eae-4bfa-a9fb-b12853c13228): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:37 crc kubenswrapper[4685]: E0129 16:32:37.734022 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:32:38 crc kubenswrapper[4685]: E0129 16:32:38.720577 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:32:38 crc kubenswrapper[4685]: E0129 16:32:38.721018 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6z6j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wx9ts_openshift-marketplace(32247aeb-b5a0-4727-ae80-88aa6b9209f3): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:38 crc kubenswrapper[4685]: E0129 16:32:38.722824 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:32:41 crc kubenswrapper[4685]: E0129 16:32:41.604259 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:32:45 crc kubenswrapper[4685]: E0129 16:32:45.605481 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:32:50 crc kubenswrapper[4685]: E0129 16:32:50.605431 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:32:50 crc kubenswrapper[4685]: E0129 16:32:50.605476 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" 
pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:32:54 crc kubenswrapper[4685]: E0129 16:32:54.928077 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:32:54 crc kubenswrapper[4685]: E0129 16:32:54.928627 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-94hc8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-cxvdf_openshift-marketplace(37a54549-4f2f-4b92-a1b1-99d418b04e93): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:54 crc kubenswrapper[4685]: E0129 16:32:54.929843 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:32:59 crc kubenswrapper[4685]: E0129 16:32:59.727255 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:32:59 crc kubenswrapper[4685]: E0129 16:32:59.727881 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jsvl9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mx7ws_openshift-marketplace(66f909c7-70dc-44c8-8fc3-22cddf5f9f76): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:32:59 crc kubenswrapper[4685]: E0129 16:32:59.729188 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:33:01 crc kubenswrapper[4685]: E0129 16:33:01.730060 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:33:01 crc kubenswrapper[4685]: E0129 16:33:01.730300 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9tfh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pwgss_openshift-marketplace(0602efa0-2eae-4bfa-a9fb-b12853c13228): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:33:01 crc kubenswrapper[4685]: E0129 16:33:01.731631 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:33:03 crc kubenswrapper[4685]: E0129 16:33:03.736563 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:33:03 crc kubenswrapper[4685]: E0129 16:33:03.737074 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6z6j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wx9ts_openshift-marketplace(32247aeb-b5a0-4727-ae80-88aa6b9209f3): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:33:03 crc kubenswrapper[4685]: E0129 16:33:03.738619 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:33:07 crc kubenswrapper[4685]: E0129 16:33:07.605351 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:33:12 crc kubenswrapper[4685]: E0129 16:33:12.606976 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:33:14 crc kubenswrapper[4685]: E0129 16:33:14.603765 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:33:16 crc kubenswrapper[4685]: E0129 16:33:16.604327 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:33:22 crc kubenswrapper[4685]: E0129 16:33:22.605655 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:33:27 crc kubenswrapper[4685]: E0129 16:33:27.603742 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:33:29 crc kubenswrapper[4685]: E0129 16:33:29.604472 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:33:29 crc kubenswrapper[4685]: E0129 16:33:29.605071 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:33:34 crc kubenswrapper[4685]: E0129 16:33:34.604066 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:33:39 crc kubenswrapper[4685]: E0129 16:33:39.603964 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" Jan 29 16:33:40 crc kubenswrapper[4685]: E0129 16:33:40.603915 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" Jan 29 16:33:44 crc kubenswrapper[4685]: I0129 16:33:44.604431 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 29 16:33:44 crc kubenswrapper[4685]: E0129 16:33:44.732557 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:33:44 crc kubenswrapper[4685]: E0129 16:33:44.732728 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9tfh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pwgss_openshift-marketplace(0602efa0-2eae-4bfa-a9fb-b12853c13228): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:33:44 crc kubenswrapper[4685]: E0129 16:33:44.733915 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:33:46 crc kubenswrapper[4685]: E0129 16:33:46.736843 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:33:46 crc kubenswrapper[4685]: E0129 16:33:46.737504 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-94hc8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-cxvdf_openshift-marketplace(37a54549-4f2f-4b92-a1b1-99d418b04e93): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:33:46 crc kubenswrapper[4685]: E0129 16:33:46.738784 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:33:57 crc kubenswrapper[4685]: E0129 16:33:57.652922 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:33:59 crc kubenswrapper[4685]: E0129 16:33:59.603144 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:33:59 crc kubenswrapper[4685]: I0129 16:33:59.825933 4685 generic.go:334] "Generic (PLEG): container finished" podID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerID="a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe" exitCode=0 Jan 29 16:33:59 crc kubenswrapper[4685]: I0129 16:33:59.826008 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wx9ts" event={"ID":"32247aeb-b5a0-4727-ae80-88aa6b9209f3","Type":"ContainerDied","Data":"a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe"} Jan 29 16:34:00 crc kubenswrapper[4685]: I0129 16:34:00.834643 4685 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/redhat-marketplace-wx9ts" event={"ID":"32247aeb-b5a0-4727-ae80-88aa6b9209f3","Type":"ContainerStarted","Data":"01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747"} Jan 29 16:34:00 crc kubenswrapper[4685]: I0129 16:34:00.860944 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wx9ts" podStartSLOduration=2.7913806279999998 podStartE2EDuration="1m40.860924364s" podCreationTimestamp="2026-01-29 16:32:20 +0000 UTC" firstStartedPulling="2026-01-29 16:32:22.208257208 +0000 UTC m=+369.405613470" lastFinishedPulling="2026-01-29 16:34:00.277800944 +0000 UTC m=+467.475157206" observedRunningTime="2026-01-29 16:34:00.860159533 +0000 UTC m=+468.057515815" watchObservedRunningTime="2026-01-29 16:34:00.860924364 +0000 UTC m=+468.058280626" Jan 29 16:34:01 crc kubenswrapper[4685]: I0129 16:34:01.129203 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:34:01 crc kubenswrapper[4685]: I0129 16:34:01.129250 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:34:02 crc kubenswrapper[4685]: I0129 16:34:02.266474 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="registry-server" probeResult="failure" output=< Jan 29 16:34:02 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Jan 29 16:34:02 crc kubenswrapper[4685]: > Jan 29 16:34:07 crc kubenswrapper[4685]: I0129 16:34:07.879803 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx7ws" event={"ID":"66f909c7-70dc-44c8-8fc3-22cddf5f9f76","Type":"ContainerStarted","Data":"919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb"} Jan 29 16:34:08 crc kubenswrapper[4685]: I0129 16:34:08.887094 4685 generic.go:334] "Generic (PLEG): container finished" podID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerID="919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb" exitCode=0 Jan 29 16:34:08 crc kubenswrapper[4685]: I0129 16:34:08.887142 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx7ws" event={"ID":"66f909c7-70dc-44c8-8fc3-22cddf5f9f76","Type":"ContainerDied","Data":"919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb"} Jan 29 16:34:10 crc kubenswrapper[4685]: I0129 16:34:10.900862 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx7ws" event={"ID":"66f909c7-70dc-44c8-8fc3-22cddf5f9f76","Type":"ContainerStarted","Data":"5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044"} Jan 29 16:34:10 crc kubenswrapper[4685]: I0129 16:34:10.927032 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mx7ws" podStartSLOduration=2.942374836 podStartE2EDuration="1m52.927011019s" podCreationTimestamp="2026-01-29 16:32:18 +0000 UTC" firstStartedPulling="2026-01-29 16:32:20.187062235 +0000 UTC m=+367.384418497" lastFinishedPulling="2026-01-29 16:34:10.171698378 +0000 UTC m=+477.369054680" observedRunningTime="2026-01-29 16:34:10.925951149 +0000 UTC m=+478.123307411" watchObservedRunningTime="2026-01-29 16:34:10.927011019 +0000 UTC m=+478.124367281" Jan 29 16:34:11 crc kubenswrapper[4685]: I0129 
16:34:11.178703 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:34:11 crc kubenswrapper[4685]: I0129 16:34:11.237405 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:34:11 crc kubenswrapper[4685]: E0129 16:34:11.604181 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:34:11 crc kubenswrapper[4685]: E0129 16:34:11.605463 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:34:19 crc kubenswrapper[4685]: I0129 16:34:19.024475 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:34:19 crc kubenswrapper[4685]: I0129 16:34:19.025260 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:34:19 crc kubenswrapper[4685]: I0129 16:34:19.078882 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:34:20 crc kubenswrapper[4685]: I0129 16:34:20.029697 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:34:24 crc kubenswrapper[4685]: E0129 16:34:24.606327 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:34:24 crc kubenswrapper[4685]: E0129 16:34:24.606803 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:34:34 crc kubenswrapper[4685]: I0129 16:34:34.330357 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:34:34 crc kubenswrapper[4685]: I0129 16:34:34.331085 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:34:35 crc kubenswrapper[4685]: E0129 16:34:35.604659 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:34:36 crc kubenswrapper[4685]: E0129 16:34:36.602964 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:34:50 crc kubenswrapper[4685]: E0129 16:34:50.606640 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:34:51 crc kubenswrapper[4685]: E0129 16:34:51.604454 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:35:04 crc kubenswrapper[4685]: I0129 16:35:04.331191 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:35:04 crc kubenswrapper[4685]: I0129 16:35:04.331815 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:35:05 crc kubenswrapper[4685]: E0129 16:35:05.612688 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:35:05 crc kubenswrapper[4685]: E0129 16:35:05.740242 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:35:05 crc kubenswrapper[4685]: E0129 16:35:05.740545 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9tfh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pwgss_openshift-marketplace(0602efa0-2eae-4bfa-a9fb-b12853c13228): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:35:05 crc kubenswrapper[4685]: E0129 16:35:05.741846 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:35:18 crc kubenswrapper[4685]: E0129 16:35:18.724608 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:35:18 crc kubenswrapper[4685]: E0129 16:35:18.725239 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-94hc8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-cxvdf_openshift-marketplace(37a54549-4f2f-4b92-a1b1-99d418b04e93): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:35:18 crc kubenswrapper[4685]: E0129 16:35:18.726529 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:35:20 crc kubenswrapper[4685]: E0129 16:35:20.604857 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:35:32 crc kubenswrapper[4685]: E0129 16:35:32.604287 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:35:33 crc kubenswrapper[4685]: E0129 16:35:33.606259 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:35:34 crc kubenswrapper[4685]: I0129 16:35:34.330951 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:35:34 crc kubenswrapper[4685]: I0129 16:35:34.331011 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:35:34 crc kubenswrapper[4685]: I0129 16:35:34.331055 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:35:34 crc kubenswrapper[4685]: I0129 16:35:34.331591 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a0419951284be5ffe9ffe438c38e991f2af3be8c6f4e6dded1d7d5d23623be54"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:35:34 crc kubenswrapper[4685]: I0129 16:35:34.331649 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://a0419951284be5ffe9ffe438c38e991f2af3be8c6f4e6dded1d7d5d23623be54" gracePeriod=600 Jan 29 16:35:35 crc kubenswrapper[4685]: I0129 16:35:35.429650 4685 generic.go:334] "Generic (PLEG): container finished" podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="a0419951284be5ffe9ffe438c38e991f2af3be8c6f4e6dded1d7d5d23623be54" exitCode=0 Jan 29 16:35:35 crc kubenswrapper[4685]: I0129 16:35:35.429754 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"a0419951284be5ffe9ffe438c38e991f2af3be8c6f4e6dded1d7d5d23623be54"} Jan 29 16:35:35 crc kubenswrapper[4685]: I0129 16:35:35.430495 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"0631ee68681351da7c993ddc9a5145cf505e96a07fb73bc3744fa991be1ba341"} Jan 29 16:35:35 crc kubenswrapper[4685]: I0129 16:35:35.430543 4685 scope.go:117] "RemoveContainer" containerID="e7de6ec8d57c9bb74f79c5a4e9fa8e09deaf5359c715811ce05763f4d27b77e6" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.437009 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pm9td"] Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.438529 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.440107 4685 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-ncn9b" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.440909 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.441916 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.449582 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pm9td"] Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.456745 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-ktj5s"] Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.457475 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.459637 4685 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-rq5h9" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.462018 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-r8gnp"] Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.462850 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-r8gnp" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.464206 4685 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-vm8mx" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.467569 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-ktj5s"] Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.482881 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-r8gnp"] Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.543014 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpqtj\" (UniqueName: \"kubernetes.io/projected/aa58a373-0547-4352-910d-85b7e54e76e4-kube-api-access-tpqtj\") pod \"cert-manager-858654f9db-r8gnp\" (UID: \"aa58a373-0547-4352-910d-85b7e54e76e4\") " pod="cert-manager/cert-manager-858654f9db-r8gnp" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.543069 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhskc\" (UniqueName: \"kubernetes.io/projected/92c08ef0-ae20-4ff1-83d1-4cef334602ef-kube-api-access-dhskc\") pod \"cert-manager-cainjector-cf98fcc89-pm9td\" (UID: \"92c08ef0-ae20-4ff1-83d1-4cef334602ef\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.543145 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ck2b\" (UniqueName: \"kubernetes.io/projected/ff37a656-e807-48e5-a595-cc0d97f1133c-kube-api-access-4ck2b\") pod \"cert-manager-webhook-687f57d79b-ktj5s\" (UID: \"ff37a656-e807-48e5-a595-cc0d97f1133c\") " pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" Jan 29 16:35:41 crc 
kubenswrapper[4685]: I0129 16:35:41.644071 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ck2b\" (UniqueName: \"kubernetes.io/projected/ff37a656-e807-48e5-a595-cc0d97f1133c-kube-api-access-4ck2b\") pod \"cert-manager-webhook-687f57d79b-ktj5s\" (UID: \"ff37a656-e807-48e5-a595-cc0d97f1133c\") " pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.644170 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpqtj\" (UniqueName: \"kubernetes.io/projected/aa58a373-0547-4352-910d-85b7e54e76e4-kube-api-access-tpqtj\") pod \"cert-manager-858654f9db-r8gnp\" (UID: \"aa58a373-0547-4352-910d-85b7e54e76e4\") " pod="cert-manager/cert-manager-858654f9db-r8gnp" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.644206 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhskc\" (UniqueName: \"kubernetes.io/projected/92c08ef0-ae20-4ff1-83d1-4cef334602ef-kube-api-access-dhskc\") pod \"cert-manager-cainjector-cf98fcc89-pm9td\" (UID: \"92c08ef0-ae20-4ff1-83d1-4cef334602ef\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.663306 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpqtj\" (UniqueName: \"kubernetes.io/projected/aa58a373-0547-4352-910d-85b7e54e76e4-kube-api-access-tpqtj\") pod \"cert-manager-858654f9db-r8gnp\" (UID: \"aa58a373-0547-4352-910d-85b7e54e76e4\") " pod="cert-manager/cert-manager-858654f9db-r8gnp" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.663358 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ck2b\" (UniqueName: \"kubernetes.io/projected/ff37a656-e807-48e5-a595-cc0d97f1133c-kube-api-access-4ck2b\") pod \"cert-manager-webhook-687f57d79b-ktj5s\" (UID: \"ff37a656-e807-48e5-a595-cc0d97f1133c\") " pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.663987 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhskc\" (UniqueName: \"kubernetes.io/projected/92c08ef0-ae20-4ff1-83d1-4cef334602ef-kube-api-access-dhskc\") pod \"cert-manager-cainjector-cf98fcc89-pm9td\" (UID: \"92c08ef0-ae20-4ff1-83d1-4cef334602ef\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.756145 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.778637 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.789948 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-858654f9db-r8gnp" Jan 29 16:35:41 crc kubenswrapper[4685]: I0129 16:35:41.978349 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pm9td"] Jan 29 16:35:42 crc kubenswrapper[4685]: I0129 16:35:42.034292 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-ktj5s"] Jan 29 16:35:42 crc kubenswrapper[4685]: I0129 16:35:42.233131 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-r8gnp"] Jan 29 16:35:42 crc kubenswrapper[4685]: W0129 16:35:42.236843 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa58a373_0547_4352_910d_85b7e54e76e4.slice/crio-efb1c361cb6293724679bde7439bed3998b7df97611a30c478727f8ac7848f1d WatchSource:0}: Error finding container efb1c361cb6293724679bde7439bed3998b7df97611a30c478727f8ac7848f1d: Status 404 returned error can't find the container with id efb1c361cb6293724679bde7439bed3998b7df97611a30c478727f8ac7848f1d Jan 29 16:35:42 crc kubenswrapper[4685]: I0129 16:35:42.469886 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-r8gnp" event={"ID":"aa58a373-0547-4352-910d-85b7e54e76e4","Type":"ContainerStarted","Data":"efb1c361cb6293724679bde7439bed3998b7df97611a30c478727f8ac7848f1d"} Jan 29 16:35:42 crc kubenswrapper[4685]: I0129 16:35:42.470754 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" event={"ID":"ff37a656-e807-48e5-a595-cc0d97f1133c","Type":"ContainerStarted","Data":"07259ef0f92d9f507a90e02cbb20f44b40120c94dfad67b724b903fee4a949b1"} Jan 29 16:35:42 crc kubenswrapper[4685]: I0129 16:35:42.472119 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" event={"ID":"92c08ef0-ae20-4ff1-83d1-4cef334602ef","Type":"ContainerStarted","Data":"8ae6a6d806ccde82ccbf319e79fe0cf1bc584d40790378ed7045b42ba9121c40"} Jan 29 16:35:46 crc kubenswrapper[4685]: I0129 16:35:46.498520 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" event={"ID":"ff37a656-e807-48e5-a595-cc0d97f1133c","Type":"ContainerStarted","Data":"f4558024de69975831da4c9785cce57b9fb1f835ae23b92c7198998447613283"} Jan 29 16:35:46 crc kubenswrapper[4685]: I0129 16:35:46.500760 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" Jan 29 16:35:46 crc kubenswrapper[4685]: I0129 16:35:46.502942 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" event={"ID":"92c08ef0-ae20-4ff1-83d1-4cef334602ef","Type":"ContainerStarted","Data":"e70b3eaf8a1b5d9c224d335d0e0a83c343b7c6f374fd76c486a5c0751eef9a63"} Jan 29 16:35:46 crc kubenswrapper[4685]: I0129 16:35:46.507065 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-r8gnp" event={"ID":"aa58a373-0547-4352-910d-85b7e54e76e4","Type":"ContainerStarted","Data":"664100abc0d7f3f36aa7a6cf013e3ce52bdd76345345be27fb1f5a34f5826085"} Jan 29 16:35:46 crc kubenswrapper[4685]: I0129 16:35:46.522982 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" podStartSLOduration=2.213556192 podStartE2EDuration="5.522959488s" 
podCreationTimestamp="2026-01-29 16:35:41 +0000 UTC" firstStartedPulling="2026-01-29 16:35:42.056468938 +0000 UTC m=+569.253825200" lastFinishedPulling="2026-01-29 16:35:45.365872234 +0000 UTC m=+572.563228496" observedRunningTime="2026-01-29 16:35:46.521346824 +0000 UTC m=+573.718703146" watchObservedRunningTime="2026-01-29 16:35:46.522959488 +0000 UTC m=+573.720315780" Jan 29 16:35:46 crc kubenswrapper[4685]: I0129 16:35:46.548928 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-r8gnp" podStartSLOduration=2.339653124 podStartE2EDuration="5.548904095s" podCreationTimestamp="2026-01-29 16:35:41 +0000 UTC" firstStartedPulling="2026-01-29 16:35:42.238985129 +0000 UTC m=+569.436341391" lastFinishedPulling="2026-01-29 16:35:45.44823609 +0000 UTC m=+572.645592362" observedRunningTime="2026-01-29 16:35:46.542970946 +0000 UTC m=+573.740327248" watchObservedRunningTime="2026-01-29 16:35:46.548904095 +0000 UTC m=+573.746260397" Jan 29 16:35:46 crc kubenswrapper[4685]: I0129 16:35:46.568917 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pm9td" podStartSLOduration=2.203266094 podStartE2EDuration="5.568894283s" podCreationTimestamp="2026-01-29 16:35:41 +0000 UTC" firstStartedPulling="2026-01-29 16:35:41.998432916 +0000 UTC m=+569.195789178" lastFinishedPulling="2026-01-29 16:35:45.364061085 +0000 UTC m=+572.561417367" observedRunningTime="2026-01-29 16:35:46.564812854 +0000 UTC m=+573.762169186" watchObservedRunningTime="2026-01-29 16:35:46.568894283 +0000 UTC m=+573.766250585" Jan 29 16:35:47 crc kubenswrapper[4685]: E0129 16:35:47.604455 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:35:47 crc kubenswrapper[4685]: E0129 16:35:47.604475 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.459150 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pjcfc"] Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.460340 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-controller" containerID="cri-o://0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.460491 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-acl-logging" containerID="cri-o://fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.460480 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" 
podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.460488 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="nbdb" containerID="cri-o://202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.460670 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="northd" containerID="cri-o://6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.460639 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="sbdb" containerID="cri-o://e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.460732 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-node" containerID="cri-o://06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.494042 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" containerID="cri-o://0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" gracePeriod=30 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.534455 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/2.log" Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.534986 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/1.log" Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.535031 4685 generic.go:334] "Generic (PLEG): container finished" podID="d75c6b72-980a-4110-b259-c4b54f0ea9b2" containerID="7e01261b8932f8d05469e1ffa919449dd5326d61d65c51b235fa210dfc03f4b3" exitCode=2 Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.535066 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerDied","Data":"7e01261b8932f8d05469e1ffa919449dd5326d61d65c51b235fa210dfc03f4b3"} Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.535111 4685 scope.go:117] "RemoveContainer" containerID="d2c6c666b8b8ab0b834e058a300398325a628ac70c2772becbc7d734ab7b7f11" Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.535762 4685 scope.go:117] "RemoveContainer" containerID="7e01261b8932f8d05469e1ffa919449dd5326d61d65c51b235fa210dfc03f4b3" Jan 29 16:35:51 crc kubenswrapper[4685]: E0129 16:35:51.536212 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: 
\"back-off 20s restarting failed container=kube-multus pod=multus-8dlmj_openshift-multus(d75c6b72-980a-4110-b259-c4b54f0ea9b2)\"" pod="openshift-multus/multus-8dlmj" podUID="d75c6b72-980a-4110-b259-c4b54f0ea9b2" Jan 29 16:35:51 crc kubenswrapper[4685]: I0129 16:35:51.782879 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-ktj5s" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.313838 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/3.log" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.315762 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovn-acl-logging/0.log" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.316268 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovn-controller/0.log" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.316695 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.388758 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mxj7h"] Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.388979 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="northd" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.388991 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="northd" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.389002 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-ovn-metrics" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.389008 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-ovn-metrics" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.389018 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.389026 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391382 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391423 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391435 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-acl-logging" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391441 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-acl-logging" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391452 4685 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="sbdb" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391458 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="sbdb" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391469 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-node" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391476 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-node" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391491 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kubecfg-setup" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391497 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kubecfg-setup" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391506 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="nbdb" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391511 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="nbdb" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391521 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391527 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391533 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391539 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391662 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391674 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391682 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-node" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391690 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391698 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="kube-rbac-proxy-ovn-metrics" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391704 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 
16:35:52.391712 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovn-acl-logging" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391719 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391726 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="nbdb" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391734 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="sbdb" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391741 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="northd" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391830 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391838 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.391845 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391850 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.391945 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" containerName="ovnkube-controller" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.393504 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402012 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-ovn-kubernetes\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402069 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-systemd\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402115 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jskgn\" (UniqueName: \"kubernetes.io/projected/00a79737-7411-4a3e-8c27-e1e16951a730-kube-api-access-jskgn\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402139 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-log-socket\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402204 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-var-lib-openvswitch\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402228 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-kubelet\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402248 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-node-log\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402278 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-env-overrides\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402311 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-var-lib-cni-networks-ovn-kubernetes\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402349 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-script-lib\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402372 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00a79737-7411-4a3e-8c27-e1e16951a730-ovn-node-metrics-cert\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402448 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-ovn\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402481 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-systemd-units\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402512 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-openvswitch\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402537 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-netd\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402560 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-slash\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402587 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-etc-openvswitch\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402615 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-bin\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402647 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-config\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.402667 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-netns\") pod \"00a79737-7411-4a3e-8c27-e1e16951a730\" (UID: \"00a79737-7411-4a3e-8c27-e1e16951a730\") " Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403037 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403075 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403579 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-node-log" (OuterVolumeSpecName: "node-log") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403609 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403634 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-log-socket" (OuterVolumeSpecName: "log-socket") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403653 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403671 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403689 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403707 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403691 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403740 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-slash" (OuterVolumeSpecName: "host-slash") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403726 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403772 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.403809 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.404226 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.404264 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.404349 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.408839 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a79737-7411-4a3e-8c27-e1e16951a730-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.409653 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00a79737-7411-4a3e-8c27-e1e16951a730-kube-api-access-jskgn" (OuterVolumeSpecName: "kube-api-access-jskgn") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "kube-api-access-jskgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.418978 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "00a79737-7411-4a3e-8c27-e1e16951a730" (UID: "00a79737-7411-4a3e-8c27-e1e16951a730"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.503972 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-ovnkube-config\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504048 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-ovnkube-script-lib\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504081 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-systemd\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504143 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-cni-netd\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504165 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504251 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-systemd-units\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504305 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-slash\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504418 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/df4a4f47-e9a8-4606-b17b-9374568fa951-ovn-node-metrics-cert\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504450 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504481 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-env-overrides\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504505 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-run-netns\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504592 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-kubelet\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504633 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-etc-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504672 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcj2q\" (UniqueName: \"kubernetes.io/projected/df4a4f47-e9a8-4606-b17b-9374568fa951-kube-api-access-wcj2q\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504695 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-log-socket\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504713 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-var-lib-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504731 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-node-log\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504760 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504870 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-ovn\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504906 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-cni-bin\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.504985 4685 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505000 4685 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505010 4685 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505018 4685 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-slash\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505026 4685 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505036 4685 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505044 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505054 4685 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505067 4685 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc 
kubenswrapper[4685]: I0129 16:35:52.505086 4685 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505103 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jskgn\" (UniqueName: \"kubernetes.io/projected/00a79737-7411-4a3e-8c27-e1e16951a730-kube-api-access-jskgn\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505114 4685 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-log-socket\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505125 4685 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505135 4685 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505145 4685 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-node-log\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505154 4685 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505163 4685 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505171 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/00a79737-7411-4a3e-8c27-e1e16951a730-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505180 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00a79737-7411-4a3e-8c27-e1e16951a730-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.505190 4685 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00a79737-7411-4a3e-8c27-e1e16951a730-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.543796 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovnkube-controller/3.log" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.546047 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovn-acl-logging/0.log" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.546594 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pjcfc_00a79737-7411-4a3e-8c27-e1e16951a730/ovn-controller/0.log" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547040 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" exitCode=0 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547074 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" exitCode=0 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547085 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" exitCode=0 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547097 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" exitCode=0 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547090 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547113 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" exitCode=0 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547124 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" exitCode=0 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547133 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" exitCode=143 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547136 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547149 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547167 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547179 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547191 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547203 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547215 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547226 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547234 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547230 4685 scope.go:117] "RemoveContainer" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547143 4685 generic.go:334] "Generic (PLEG): container finished" podID="00a79737-7411-4a3e-8c27-e1e16951a730" containerID="0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" exitCode=143 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547240 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547375 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547422 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547435 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547446 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547457 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547484 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547512 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547523 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547534 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547545 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547556 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547566 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547577 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547587 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547597 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547607 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547622 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547638 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547652 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547664 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547675 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547685 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547696 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547708 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547720 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547732 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547746 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547764 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pjcfc" event={"ID":"00a79737-7411-4a3e-8c27-e1e16951a730","Type":"ContainerDied","Data":"ae142fc9ca2dc4c04f0d0c1aa5f3ebdedad8db72fd338c7dfb257c0d5019483d"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547788 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547805 4685 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547821 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547833 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547844 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547855 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547867 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547881 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547895 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.547910 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.549528 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/2.log" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.567504 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.585857 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pjcfc"] Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.589470 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pjcfc"] Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.602971 4685 scope.go:117] "RemoveContainer" containerID="e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606084 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-systemd-units\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606123 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-slash\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606153 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/df4a4f47-e9a8-4606-b17b-9374568fa951-ovn-node-metrics-cert\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606172 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606192 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-env-overrides\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606206 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-systemd-units\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606252 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-slash\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606317 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606209 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-run-netns\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606400 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-kubelet\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606426 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-etc-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606460 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcj2q\" (UniqueName: \"kubernetes.io/projected/df4a4f47-e9a8-4606-b17b-9374568fa951-kube-api-access-wcj2q\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606481 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-log-socket\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606503 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-var-lib-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606523 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-node-log\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606556 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606601 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-ovn\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606617 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-cni-bin\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606642 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-ovnkube-config\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606682 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-ovnkube-script-lib\") pod 
\"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606721 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-systemd\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606749 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-cni-netd\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606765 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606836 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606844 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-env-overrides\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606890 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-node-log\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606252 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-run-netns\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606924 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-kubelet\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.606945 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-etc-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: 
\"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607227 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-log-socket\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607270 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-var-lib-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607303 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-ovn\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607333 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-openvswitch\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607363 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-run-systemd\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607641 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-cni-bin\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607728 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/df4a4f47-e9a8-4606-b17b-9374568fa951-host-cni-netd\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607826 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-ovnkube-config\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.607953 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/df4a4f47-e9a8-4606-b17b-9374568fa951-ovnkube-script-lib\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.611184 4685 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/df4a4f47-e9a8-4606-b17b-9374568fa951-ovn-node-metrics-cert\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.624854 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcj2q\" (UniqueName: \"kubernetes.io/projected/df4a4f47-e9a8-4606-b17b-9374568fa951-kube-api-access-wcj2q\") pod \"ovnkube-node-mxj7h\" (UID: \"df4a4f47-e9a8-4606-b17b-9374568fa951\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.659060 4685 scope.go:117] "RemoveContainer" containerID="202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.670946 4685 scope.go:117] "RemoveContainer" containerID="6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.681609 4685 scope.go:117] "RemoveContainer" containerID="78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.694356 4685 scope.go:117] "RemoveContainer" containerID="06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.706561 4685 scope.go:117] "RemoveContainer" containerID="fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.720362 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.720984 4685 scope.go:117] "RemoveContainer" containerID="0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.740006 4685 scope.go:117] "RemoveContainer" containerID="8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805" Jan 29 16:35:52 crc kubenswrapper[4685]: W0129 16:35:52.745172 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf4a4f47_e9a8_4606_b17b_9374568fa951.slice/crio-031fba11d3cf1c663aacfaebad9c12d251c647ffb94adf99887642250078ba29 WatchSource:0}: Error finding container 031fba11d3cf1c663aacfaebad9c12d251c647ffb94adf99887642250078ba29: Status 404 returned error can't find the container with id 031fba11d3cf1c663aacfaebad9c12d251c647ffb94adf99887642250078ba29 Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.768624 4685 scope.go:117] "RemoveContainer" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.769036 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": container with ID starting with 0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242 not found: ID does not exist" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769061 4685 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} err="failed to get container status \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": rpc error: code = NotFound desc = could not find container \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": container with ID starting with 0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769080 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.769351 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": container with ID starting with 27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6 not found: ID does not exist" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769368 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} err="failed to get container status \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": rpc error: code = NotFound desc = could not find container \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": container with ID starting with 27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769380 4685 scope.go:117] "RemoveContainer" containerID="e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.769638 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": container with ID starting with e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700 not found: ID does not exist" containerID="e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769652 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} err="failed to get container status \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": rpc error: code = NotFound desc = could not find container \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": container with ID starting with e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769664 4685 scope.go:117] "RemoveContainer" containerID="202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.769907 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": container with ID starting with 202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414 not found: ID does not exist" 
containerID="202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769922 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} err="failed to get container status \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": rpc error: code = NotFound desc = could not find container \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": container with ID starting with 202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.769936 4685 scope.go:117] "RemoveContainer" containerID="6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.770099 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": container with ID starting with 6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532 not found: ID does not exist" containerID="6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.770114 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} err="failed to get container status \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": rpc error: code = NotFound desc = could not find container \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": container with ID starting with 6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.770127 4685 scope.go:117] "RemoveContainer" containerID="78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.770471 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": container with ID starting with 78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a not found: ID does not exist" containerID="78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.770487 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} err="failed to get container status \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": rpc error: code = NotFound desc = could not find container \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": container with ID starting with 78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.770499 4685 scope.go:117] "RemoveContainer" containerID="06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.770865 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": container with ID starting with 06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7 not found: ID does not exist" containerID="06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.770879 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} err="failed to get container status \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": rpc error: code = NotFound desc = could not find container \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": container with ID starting with 06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.770890 4685 scope.go:117] "RemoveContainer" containerID="fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.771118 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": container with ID starting with fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd not found: ID does not exist" containerID="fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.771132 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} err="failed to get container status \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": rpc error: code = NotFound desc = could not find container \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": container with ID starting with fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.771145 4685 scope.go:117] "RemoveContainer" containerID="0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" Jan 29 16:35:52 crc kubenswrapper[4685]: E0129 16:35:52.771326 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": container with ID starting with 0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479 not found: ID does not exist" containerID="0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.771342 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} err="failed to get container status \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": rpc error: code = NotFound desc = could not find container \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": container with ID starting with 0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.771355 4685 scope.go:117] "RemoveContainer" containerID="8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805" Jan 29 16:35:52 crc 
kubenswrapper[4685]: E0129 16:35:52.771949 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": container with ID starting with 8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805 not found: ID does not exist" containerID="8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.771992 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} err="failed to get container status \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": rpc error: code = NotFound desc = could not find container \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": container with ID starting with 8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.772021 4685 scope.go:117] "RemoveContainer" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.772490 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} err="failed to get container status \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": rpc error: code = NotFound desc = could not find container \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": container with ID starting with 0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.772519 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.772796 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} err="failed to get container status \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": rpc error: code = NotFound desc = could not find container \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": container with ID starting with 27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.772826 4685 scope.go:117] "RemoveContainer" containerID="e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.773041 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} err="failed to get container status \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": rpc error: code = NotFound desc = could not find container \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": container with ID starting with e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.773067 4685 scope.go:117] "RemoveContainer" containerID="202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" Jan 29 16:35:52 crc 
kubenswrapper[4685]: I0129 16:35:52.773431 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} err="failed to get container status \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": rpc error: code = NotFound desc = could not find container \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": container with ID starting with 202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.773449 4685 scope.go:117] "RemoveContainer" containerID="6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.773639 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} err="failed to get container status \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": rpc error: code = NotFound desc = could not find container \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": container with ID starting with 6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.773657 4685 scope.go:117] "RemoveContainer" containerID="78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.773925 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} err="failed to get container status \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": rpc error: code = NotFound desc = could not find container \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": container with ID starting with 78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.773942 4685 scope.go:117] "RemoveContainer" containerID="06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774143 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} err="failed to get container status \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": rpc error: code = NotFound desc = could not find container \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": container with ID starting with 06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774159 4685 scope.go:117] "RemoveContainer" containerID="fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774465 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} err="failed to get container status \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": rpc error: code = NotFound desc = could not find container \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": container with ID 
starting with fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774483 4685 scope.go:117] "RemoveContainer" containerID="0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774702 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} err="failed to get container status \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": rpc error: code = NotFound desc = could not find container \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": container with ID starting with 0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774721 4685 scope.go:117] "RemoveContainer" containerID="8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774943 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} err="failed to get container status \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": rpc error: code = NotFound desc = could not find container \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": container with ID starting with 8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.774961 4685 scope.go:117] "RemoveContainer" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775216 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} err="failed to get container status \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": rpc error: code = NotFound desc = could not find container \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": container with ID starting with 0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775232 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775499 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} err="failed to get container status \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": rpc error: code = NotFound desc = could not find container \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": container with ID starting with 27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775515 4685 scope.go:117] "RemoveContainer" containerID="e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775721 4685 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} err="failed to get container status \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": rpc error: code = NotFound desc = could not find container \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": container with ID starting with e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775742 4685 scope.go:117] "RemoveContainer" containerID="202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775954 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} err="failed to get container status \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": rpc error: code = NotFound desc = could not find container \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": container with ID starting with 202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.775976 4685 scope.go:117] "RemoveContainer" containerID="6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776204 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} err="failed to get container status \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": rpc error: code = NotFound desc = could not find container \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": container with ID starting with 6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776233 4685 scope.go:117] "RemoveContainer" containerID="78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776454 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} err="failed to get container status \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": rpc error: code = NotFound desc = could not find container \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": container with ID starting with 78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776473 4685 scope.go:117] "RemoveContainer" containerID="06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776680 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} err="failed to get container status \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": rpc error: code = NotFound desc = could not find container \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": container with ID starting with 06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7 not found: ID does not exist" Jan 
29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776698 4685 scope.go:117] "RemoveContainer" containerID="fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776952 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} err="failed to get container status \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": rpc error: code = NotFound desc = could not find container \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": container with ID starting with fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.776970 4685 scope.go:117] "RemoveContainer" containerID="0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.777219 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} err="failed to get container status \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": rpc error: code = NotFound desc = could not find container \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": container with ID starting with 0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.777236 4685 scope.go:117] "RemoveContainer" containerID="8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.777536 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} err="failed to get container status \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": rpc error: code = NotFound desc = could not find container \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": container with ID starting with 8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.777556 4685 scope.go:117] "RemoveContainer" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.777814 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} err="failed to get container status \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": rpc error: code = NotFound desc = could not find container \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": container with ID starting with 0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.777872 4685 scope.go:117] "RemoveContainer" containerID="27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.778168 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6"} err="failed to get container status 
\"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": rpc error: code = NotFound desc = could not find container \"27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6\": container with ID starting with 27a84d6f8750d9b9e4e60e1702151200af3249fd9267c080d9bf0644e0eed4e6 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.778187 4685 scope.go:117] "RemoveContainer" containerID="e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.778468 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700"} err="failed to get container status \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": rpc error: code = NotFound desc = could not find container \"e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700\": container with ID starting with e78aeac5be32637bfe9739ae0a952044ccf3a13139175fe046479b746feaf700 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.778492 4685 scope.go:117] "RemoveContainer" containerID="202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.778786 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414"} err="failed to get container status \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": rpc error: code = NotFound desc = could not find container \"202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414\": container with ID starting with 202664f624a98ec7251aa064a7b9c8f33f5548b153e7933ff94c479453eba414 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.778804 4685 scope.go:117] "RemoveContainer" containerID="6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.779094 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532"} err="failed to get container status \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": rpc error: code = NotFound desc = could not find container \"6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532\": container with ID starting with 6968c85534df16fb53941e661e498d19d9804e8dc945bbb2797cee1b58c93532 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.779114 4685 scope.go:117] "RemoveContainer" containerID="78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.779453 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a"} err="failed to get container status \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": rpc error: code = NotFound desc = could not find container \"78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a\": container with ID starting with 78aa56dbf44fbf419c27da7ef0ece66b42575682101a7c371e9f30940ff7ca6a not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.779473 4685 scope.go:117] "RemoveContainer" 
containerID="06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.779720 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7"} err="failed to get container status \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": rpc error: code = NotFound desc = could not find container \"06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7\": container with ID starting with 06ffac9c5991fde89d1f1bd350ab162d1df6f9838865d699c933f27bb8ab38c7 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.779742 4685 scope.go:117] "RemoveContainer" containerID="fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.780015 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd"} err="failed to get container status \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": rpc error: code = NotFound desc = could not find container \"fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd\": container with ID starting with fb3199ff24e5ce69b1a999294bc1a55ae9644870009fe08f8eeeaa80cb170dcd not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.780037 4685 scope.go:117] "RemoveContainer" containerID="0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.780344 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479"} err="failed to get container status \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": rpc error: code = NotFound desc = could not find container \"0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479\": container with ID starting with 0809fa10de983de535ed8af502cf5f976fda3ac1dd8297513c52c2c61fa11479 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.780366 4685 scope.go:117] "RemoveContainer" containerID="8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.780686 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805"} err="failed to get container status \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": rpc error: code = NotFound desc = could not find container \"8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805\": container with ID starting with 8c938024659bed0065103e1b75dd1f92775c1d252bf46f39d34f9798f5186805 not found: ID does not exist" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.780706 4685 scope.go:117] "RemoveContainer" containerID="0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242" Jan 29 16:35:52 crc kubenswrapper[4685]: I0129 16:35:52.780974 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242"} err="failed to get container status \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": rpc error: code = NotFound desc = could not find 
container \"0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242\": container with ID starting with 0553837ab0ccd36581061be2da81249eb9afe3c9e943ee9ef85d269f5ba32242 not found: ID does not exist" Jan 29 16:35:53 crc kubenswrapper[4685]: I0129 16:35:53.557487 4685 generic.go:334] "Generic (PLEG): container finished" podID="df4a4f47-e9a8-4606-b17b-9374568fa951" containerID="9f42e196aec90538513698b42c87739e5a78f39b0680730f312262b78f582f2f" exitCode=0 Jan 29 16:35:53 crc kubenswrapper[4685]: I0129 16:35:53.557677 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerDied","Data":"9f42e196aec90538513698b42c87739e5a78f39b0680730f312262b78f582f2f"} Jan 29 16:35:53 crc kubenswrapper[4685]: I0129 16:35:53.557850 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"031fba11d3cf1c663aacfaebad9c12d251c647ffb94adf99887642250078ba29"} Jan 29 16:35:53 crc kubenswrapper[4685]: I0129 16:35:53.611315 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00a79737-7411-4a3e-8c27-e1e16951a730" path="/var/lib/kubelet/pods/00a79737-7411-4a3e-8c27-e1e16951a730/volumes" Jan 29 16:35:54 crc kubenswrapper[4685]: I0129 16:35:54.568984 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"f1eab9a531d83bd98d7fab1a2efdbdcc715f03c04b8b1f3f11a44c8ca6757f02"} Jan 29 16:35:54 crc kubenswrapper[4685]: I0129 16:35:54.569377 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"1a0388b6c29695ae056fee5530b01da79a1b703c203d275e319f1073a13f204e"} Jan 29 16:35:54 crc kubenswrapper[4685]: I0129 16:35:54.569424 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"857b2194dd70d5500e0fb40926794453610359294e5ce55fd64f4533908fdc6e"} Jan 29 16:35:54 crc kubenswrapper[4685]: I0129 16:35:54.569442 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"b0cd1c1a45d1ce2bfbf37ab099bf82e6d9a755ed82674b710b0069a8bdae3394"} Jan 29 16:35:54 crc kubenswrapper[4685]: I0129 16:35:54.569458 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"4a82d5bf6c67dfea52769f5062a6997513c45c14f5085635fb868592721b21dc"} Jan 29 16:35:54 crc kubenswrapper[4685]: I0129 16:35:54.569474 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"48cdfed46a301eac854dc66781c27ffb8c995fadd2c9844e585b591474a89992"} Jan 29 16:35:56 crc kubenswrapper[4685]: I0129 16:35:56.584903 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" 
event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"30b0e68be1abae7662e205cc8564405991c41f2132e1ff7249d4117945b0ab67"} Jan 29 16:35:58 crc kubenswrapper[4685]: E0129 16:35:58.603478 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:35:58 crc kubenswrapper[4685]: E0129 16:35:58.604895 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:35:59 crc kubenswrapper[4685]: I0129 16:35:59.621978 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" event={"ID":"df4a4f47-e9a8-4606-b17b-9374568fa951","Type":"ContainerStarted","Data":"980b82b2b8b501399d25e86140dc9867193cc81568e06bff28ca745954351c7b"} Jan 29 16:35:59 crc kubenswrapper[4685]: I0129 16:35:59.622581 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:59 crc kubenswrapper[4685]: I0129 16:35:59.652957 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:35:59 crc kubenswrapper[4685]: I0129 16:35:59.654683 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" podStartSLOduration=7.654667171 podStartE2EDuration="7.654667171s" podCreationTimestamp="2026-01-29 16:35:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:35:59.650204189 +0000 UTC m=+586.847560501" watchObservedRunningTime="2026-01-29 16:35:59.654667171 +0000 UTC m=+586.852023443" Jan 29 16:36:00 crc kubenswrapper[4685]: I0129 16:36:00.628119 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:36:00 crc kubenswrapper[4685]: I0129 16:36:00.629609 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:36:00 crc kubenswrapper[4685]: I0129 16:36:00.665881 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:36:03 crc kubenswrapper[4685]: I0129 16:36:03.605314 4685 scope.go:117] "RemoveContainer" containerID="7e01261b8932f8d05469e1ffa919449dd5326d61d65c51b235fa210dfc03f4b3" Jan 29 16:36:03 crc kubenswrapper[4685]: E0129 16:36:03.606027 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8dlmj_openshift-multus(d75c6b72-980a-4110-b259-c4b54f0ea9b2)\"" pod="openshift-multus/multus-8dlmj" podUID="d75c6b72-980a-4110-b259-c4b54f0ea9b2" Jan 29 16:36:10 crc kubenswrapper[4685]: E0129 16:36:10.604902 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off 
pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:36:13 crc kubenswrapper[4685]: E0129 16:36:13.611469 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:36:14 crc kubenswrapper[4685]: I0129 16:36:14.601997 4685 scope.go:117] "RemoveContainer" containerID="7e01261b8932f8d05469e1ffa919449dd5326d61d65c51b235fa210dfc03f4b3" Jan 29 16:36:15 crc kubenswrapper[4685]: I0129 16:36:15.730800 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8dlmj_d75c6b72-980a-4110-b259-c4b54f0ea9b2/kube-multus/2.log" Jan 29 16:36:15 crc kubenswrapper[4685]: I0129 16:36:15.731292 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8dlmj" event={"ID":"d75c6b72-980a-4110-b259-c4b54f0ea9b2","Type":"ContainerStarted","Data":"ab19f8d6f1892548e0e1e04a7d98d8b1675eb9bce4179f036ce1ab65fa141361"} Jan 29 16:36:21 crc kubenswrapper[4685]: E0129 16:36:21.605726 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwgss" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" Jan 29 16:36:22 crc kubenswrapper[4685]: I0129 16:36:22.754003 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxj7h" Jan 29 16:36:25 crc kubenswrapper[4685]: E0129 16:36:25.604517 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cxvdf" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.939156 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxvdf"] Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.942768 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pwgss"] Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.946010 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8blbd"] Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.947354 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" podUID="fa8250b5-b775-4d1a-b7ee-659aab2dc188" containerName="marketplace-operator" containerID="cri-o://120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8" gracePeriod=30 Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.949797 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wx9ts"] Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.963831 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wx9ts" 
podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="registry-server" containerID="cri-o://01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" gracePeriod=30 Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.992553 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mx7ws"] Jan 29 16:36:30 crc kubenswrapper[4685]: I0129 16:36:30.993315 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mx7ws" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="registry-server" containerID="cri-o://5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044" gracePeriod=30 Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.002061 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t822z"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.043255 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.057344 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t822z"] Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.129417 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747 is running failed: container process not found" containerID="01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" cmd=["grpc_health_probe","-addr=:50051"] Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.130597 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747 is running failed: container process not found" containerID="01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" cmd=["grpc_health_probe","-addr=:50051"] Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.131315 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747 is running failed: container process not found" containerID="01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" cmd=["grpc_health_probe","-addr=:50051"] Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.131359 4685 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-wx9ts" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="registry-server" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.145504 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6004d355-0746-423d-a4e5-fe07d8494da1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.145601 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s57hh\" (UniqueName: \"kubernetes.io/projected/6004d355-0746-423d-a4e5-fe07d8494da1-kube-api-access-s57hh\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.145666 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6004d355-0746-423d-a4e5-fe07d8494da1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.246973 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6004d355-0746-423d-a4e5-fe07d8494da1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.247067 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6004d355-0746-423d-a4e5-fe07d8494da1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.247089 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s57hh\" (UniqueName: \"kubernetes.io/projected/6004d355-0746-423d-a4e5-fe07d8494da1-kube-api-access-s57hh\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.248957 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6004d355-0746-423d-a4e5-fe07d8494da1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.255653 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6004d355-0746-423d-a4e5-fe07d8494da1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.264922 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s57hh\" (UniqueName: \"kubernetes.io/projected/6004d355-0746-423d-a4e5-fe07d8494da1-kube-api-access-s57hh\") pod \"marketplace-operator-79b997595-t822z\" (UID: \"6004d355-0746-423d-a4e5-fe07d8494da1\") " pod="openshift-marketplace/marketplace-operator-79b997595-t822z" 
Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.306519 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.308333 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.314882 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.359087 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.389211 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.393793 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.448780 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-catalog-content\") pod \"0602efa0-2eae-4bfa-a9fb-b12853c13228\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.448866 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tfh5\" (UniqueName: \"kubernetes.io/projected/0602efa0-2eae-4bfa-a9fb-b12853c13228-kube-api-access-9tfh5\") pod \"0602efa0-2eae-4bfa-a9fb-b12853c13228\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.448942 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-catalog-content\") pod \"37a54549-4f2f-4b92-a1b1-99d418b04e93\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.448989 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-utilities\") pod \"0602efa0-2eae-4bfa-a9fb-b12853c13228\" (UID: \"0602efa0-2eae-4bfa-a9fb-b12853c13228\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.449024 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-utilities\") pod \"37a54549-4f2f-4b92-a1b1-99d418b04e93\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.449044 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94hc8\" (UniqueName: \"kubernetes.io/projected/37a54549-4f2f-4b92-a1b1-99d418b04e93-kube-api-access-94hc8\") pod \"37a54549-4f2f-4b92-a1b1-99d418b04e93\" (UID: \"37a54549-4f2f-4b92-a1b1-99d418b04e93\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.449164 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37a54549-4f2f-4b92-a1b1-99d418b04e93" (UID: "37a54549-4f2f-4b92-a1b1-99d418b04e93"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.449235 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0602efa0-2eae-4bfa-a9fb-b12853c13228" (UID: "0602efa0-2eae-4bfa-a9fb-b12853c13228"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.449461 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.449479 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.449929 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-utilities" (OuterVolumeSpecName: "utilities") pod "0602efa0-2eae-4bfa-a9fb-b12853c13228" (UID: "0602efa0-2eae-4bfa-a9fb-b12853c13228"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.450542 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-utilities" (OuterVolumeSpecName: "utilities") pod "37a54549-4f2f-4b92-a1b1-99d418b04e93" (UID: "37a54549-4f2f-4b92-a1b1-99d418b04e93"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.452887 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0602efa0-2eae-4bfa-a9fb-b12853c13228-kube-api-access-9tfh5" (OuterVolumeSpecName: "kube-api-access-9tfh5") pod "0602efa0-2eae-4bfa-a9fb-b12853c13228" (UID: "0602efa0-2eae-4bfa-a9fb-b12853c13228"). InnerVolumeSpecName "kube-api-access-9tfh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.453929 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37a54549-4f2f-4b92-a1b1-99d418b04e93-kube-api-access-94hc8" (OuterVolumeSpecName: "kube-api-access-94hc8") pod "37a54549-4f2f-4b92-a1b1-99d418b04e93" (UID: "37a54549-4f2f-4b92-a1b1-99d418b04e93"). InnerVolumeSpecName "kube-api-access-94hc8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.503662 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t822z"] Jan 29 16:36:31 crc kubenswrapper[4685]: W0129 16:36:31.506807 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6004d355_0746_423d_a4e5_fe07d8494da1.slice/crio-0b577a4cbd03f3e9feaf56a3409c0ec595c72d83510c064b89e49146c1a1f1f9 WatchSource:0}: Error finding container 0b577a4cbd03f3e9feaf56a3409c0ec595c72d83510c064b89e49146c1a1f1f9: Status 404 returned error can't find the container with id 0b577a4cbd03f3e9feaf56a3409c0ec595c72d83510c064b89e49146c1a1f1f9 Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.549922 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9drz\" (UniqueName: \"kubernetes.io/projected/fa8250b5-b775-4d1a-b7ee-659aab2dc188-kube-api-access-z9drz\") pod \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.549976 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-utilities\") pod \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550011 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-catalog-content\") pod \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550045 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-utilities\") pod \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550081 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-catalog-content\") pod \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550108 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6z6j\" (UniqueName: \"kubernetes.io/projected/32247aeb-b5a0-4727-ae80-88aa6b9209f3-kube-api-access-x6z6j\") pod \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\" (UID: \"32247aeb-b5a0-4727-ae80-88aa6b9209f3\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550138 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsvl9\" (UniqueName: \"kubernetes.io/projected/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-kube-api-access-jsvl9\") pod \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\" (UID: \"66f909c7-70dc-44c8-8fc3-22cddf5f9f76\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550212 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-operator-metrics\") pod \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550580 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-trusted-ca\") pod \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\" (UID: \"fa8250b5-b775-4d1a-b7ee-659aab2dc188\") " Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550757 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0602efa0-2eae-4bfa-a9fb-b12853c13228-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550769 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a54549-4f2f-4b92-a1b1-99d418b04e93-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550778 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94hc8\" (UniqueName: \"kubernetes.io/projected/37a54549-4f2f-4b92-a1b1-99d418b04e93-kube-api-access-94hc8\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.550787 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tfh5\" (UniqueName: \"kubernetes.io/projected/0602efa0-2eae-4bfa-a9fb-b12853c13228-kube-api-access-9tfh5\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.551008 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-utilities" (OuterVolumeSpecName: "utilities") pod "66f909c7-70dc-44c8-8fc3-22cddf5f9f76" (UID: "66f909c7-70dc-44c8-8fc3-22cddf5f9f76"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.551025 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-utilities" (OuterVolumeSpecName: "utilities") pod "32247aeb-b5a0-4727-ae80-88aa6b9209f3" (UID: "32247aeb-b5a0-4727-ae80-88aa6b9209f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.551437 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "fa8250b5-b775-4d1a-b7ee-659aab2dc188" (UID: "fa8250b5-b775-4d1a-b7ee-659aab2dc188"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.553584 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "fa8250b5-b775-4d1a-b7ee-659aab2dc188" (UID: "fa8250b5-b775-4d1a-b7ee-659aab2dc188"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.553677 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-kube-api-access-jsvl9" (OuterVolumeSpecName: "kube-api-access-jsvl9") pod "66f909c7-70dc-44c8-8fc3-22cddf5f9f76" (UID: "66f909c7-70dc-44c8-8fc3-22cddf5f9f76"). InnerVolumeSpecName "kube-api-access-jsvl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.558616 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa8250b5-b775-4d1a-b7ee-659aab2dc188-kube-api-access-z9drz" (OuterVolumeSpecName: "kube-api-access-z9drz") pod "fa8250b5-b775-4d1a-b7ee-659aab2dc188" (UID: "fa8250b5-b775-4d1a-b7ee-659aab2dc188"). InnerVolumeSpecName "kube-api-access-z9drz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.558657 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32247aeb-b5a0-4727-ae80-88aa6b9209f3-kube-api-access-x6z6j" (OuterVolumeSpecName: "kube-api-access-x6z6j") pod "32247aeb-b5a0-4727-ae80-88aa6b9209f3" (UID: "32247aeb-b5a0-4727-ae80-88aa6b9209f3"). InnerVolumeSpecName "kube-api-access-x6z6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.571680 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32247aeb-b5a0-4727-ae80-88aa6b9209f3" (UID: "32247aeb-b5a0-4727-ae80-88aa6b9209f3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651786 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651822 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6z6j\" (UniqueName: \"kubernetes.io/projected/32247aeb-b5a0-4727-ae80-88aa6b9209f3-kube-api-access-x6z6j\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651835 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsvl9\" (UniqueName: \"kubernetes.io/projected/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-kube-api-access-jsvl9\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651847 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651861 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa8250b5-b775-4d1a-b7ee-659aab2dc188-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651875 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9drz\" (UniqueName: \"kubernetes.io/projected/fa8250b5-b775-4d1a-b7ee-659aab2dc188-kube-api-access-z9drz\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651886 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.651898 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32247aeb-b5a0-4727-ae80-88aa6b9209f3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.685060 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66f909c7-70dc-44c8-8fc3-22cddf5f9f76" (UID: "66f909c7-70dc-44c8-8fc3-22cddf5f9f76"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.753247 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66f909c7-70dc-44c8-8fc3-22cddf5f9f76-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.839346 4685 generic.go:334] "Generic (PLEG): container finished" podID="fa8250b5-b775-4d1a-b7ee-659aab2dc188" containerID="120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8" exitCode=0 Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.839423 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" event={"ID":"fa8250b5-b775-4d1a-b7ee-659aab2dc188","Type":"ContainerDied","Data":"120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.839947 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" event={"ID":"fa8250b5-b775-4d1a-b7ee-659aab2dc188","Type":"ContainerDied","Data":"c2cd8d5ddcb2c267684e01f70a11d1c35e7555a9fa229df38f5448eddc83bb97"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.839452 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8blbd" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.840010 4685 scope.go:117] "RemoveContainer" containerID="120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.842368 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" event={"ID":"6004d355-0746-423d-a4e5-fe07d8494da1","Type":"ContainerStarted","Data":"2291acb51100db05c1fcf3829b637cc86a9218422df7f9284068dbb4c90e70f2"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.842591 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" event={"ID":"6004d355-0746-423d-a4e5-fe07d8494da1","Type":"ContainerStarted","Data":"0b577a4cbd03f3e9feaf56a3409c0ec595c72d83510c064b89e49146c1a1f1f9"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.842814 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.843691 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-t822z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/healthz\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.843729 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" podUID="6004d355-0746-423d-a4e5-fe07d8494da1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.8:8080/healthz\": dial tcp 10.217.0.8:8080: connect: connection refused" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.844916 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxvdf" 
event={"ID":"37a54549-4f2f-4b92-a1b1-99d418b04e93","Type":"ContainerDied","Data":"66ff64e001d6e6e811420d883c3fdaf0b201784c37eb8a4c8d5ca3ecc86fe517"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.845189 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cxvdf" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.847411 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwgss" event={"ID":"0602efa0-2eae-4bfa-a9fb-b12853c13228","Type":"ContainerDied","Data":"5173a7929946a8c7705c3a25cf3b96c2060fb61d999ffebde462c8a98fd9dfe5"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.847684 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pwgss" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.852569 4685 generic.go:334] "Generic (PLEG): container finished" podID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerID="5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044" exitCode=0 Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.852637 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx7ws" event={"ID":"66f909c7-70dc-44c8-8fc3-22cddf5f9f76","Type":"ContainerDied","Data":"5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.852664 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx7ws" event={"ID":"66f909c7-70dc-44c8-8fc3-22cddf5f9f76","Type":"ContainerDied","Data":"6858bb0485f11d94a77d636355f15c12ccbf1aa1fd3a99f581c7a5a1b705bff1"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.852669 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mx7ws" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.858350 4685 generic.go:334] "Generic (PLEG): container finished" podID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerID="01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" exitCode=0 Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.858521 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wx9ts" event={"ID":"32247aeb-b5a0-4727-ae80-88aa6b9209f3","Type":"ContainerDied","Data":"01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.858608 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wx9ts" event={"ID":"32247aeb-b5a0-4727-ae80-88aa6b9209f3","Type":"ContainerDied","Data":"bedf56e18020aef6465bf621a807dbdb813b889004ea65cb6d4ecee162c0bf8d"} Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.858705 4685 scope.go:117] "RemoveContainer" containerID="120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.858849 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wx9ts" Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.861144 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8\": container with ID starting with 120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8 not found: ID does not exist" containerID="120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.861270 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8"} err="failed to get container status \"120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8\": rpc error: code = NotFound desc = could not find container \"120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8\": container with ID starting with 120a92caca0c80428ced2f56c5a0837f9e9c3d3dfe30521fa52dde09ff1fb5c8 not found: ID does not exist" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.861439 4685 scope.go:117] "RemoveContainer" containerID="14a921dcd1313c218b53624b234a9aa82ec8adfffd0d01001dcb00413442d45a" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.883341 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" podStartSLOduration=1.8833197739999998 podStartE2EDuration="1.883319774s" podCreationTimestamp="2026-01-29 16:36:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-29 16:36:31.866711279 +0000 UTC m=+619.064067591" watchObservedRunningTime="2026-01-29 16:36:31.883319774 +0000 UTC m=+619.080676066" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.891199 4685 scope.go:117] "RemoveContainer" containerID="dff29a73bfecf1dcc278360b78acb32ec635e0ceb4778df585fc0100634c95dc" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.893856 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8blbd"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.897783 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8blbd"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.901637 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mx7ws"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.906066 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mx7ws"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.907199 4685 scope.go:117] "RemoveContainer" containerID="5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.921501 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxvdf"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.925679 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cxvdf"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.927321 4685 scope.go:117] "RemoveContainer" containerID="919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 
16:36:31.960969 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pwgss"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.962207 4685 scope.go:117] "RemoveContainer" containerID="be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.964935 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pwgss"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.969233 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wx9ts"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.976051 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wx9ts"] Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.977631 4685 scope.go:117] "RemoveContainer" containerID="5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044" Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.978060 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044\": container with ID starting with 5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044 not found: ID does not exist" containerID="5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.978103 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044"} err="failed to get container status \"5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044\": rpc error: code = NotFound desc = could not find container \"5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044\": container with ID starting with 5981239670ad741a4b76a47c3bd11bef5c0146065ac712a764f609e3d7465044 not found: ID does not exist" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.978132 4685 scope.go:117] "RemoveContainer" containerID="919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb" Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.978439 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb\": container with ID starting with 919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb not found: ID does not exist" containerID="919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.978472 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb"} err="failed to get container status \"919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb\": rpc error: code = NotFound desc = could not find container \"919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb\": container with ID starting with 919f660e48b12cd634b86c8cb16a0c1b5143dfb03197a7fb2c88b2f59b0761fb not found: ID does not exist" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.978499 4685 scope.go:117] "RemoveContainer" containerID="be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36" Jan 29 16:36:31 crc kubenswrapper[4685]: E0129 16:36:31.978705 4685 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36\": container with ID starting with be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36 not found: ID does not exist" containerID="be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.978726 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36"} err="failed to get container status \"be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36\": rpc error: code = NotFound desc = could not find container \"be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36\": container with ID starting with be4dd0d7ee43509aa241550514b9cabf7a71c5be006dbfd5fea4dc6c9f167a36 not found: ID does not exist" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.978739 4685 scope.go:117] "RemoveContainer" containerID="01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" Jan 29 16:36:31 crc kubenswrapper[4685]: I0129 16:36:31.998767 4685 scope.go:117] "RemoveContainer" containerID="a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.017090 4685 scope.go:117] "RemoveContainer" containerID="11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.028334 4685 scope.go:117] "RemoveContainer" containerID="01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" Jan 29 16:36:32 crc kubenswrapper[4685]: E0129 16:36:32.028867 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747\": container with ID starting with 01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747 not found: ID does not exist" containerID="01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.028894 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747"} err="failed to get container status \"01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747\": rpc error: code = NotFound desc = could not find container \"01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747\": container with ID starting with 01fb8d3199a4ad021ac27b602f1d3988288e83d56419aa88b37682ebc55a6747 not found: ID does not exist" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.028915 4685 scope.go:117] "RemoveContainer" containerID="a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe" Jan 29 16:36:32 crc kubenswrapper[4685]: E0129 16:36:32.029282 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe\": container with ID starting with a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe not found: ID does not exist" containerID="a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.029301 4685 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe"} err="failed to get container status \"a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe\": rpc error: code = NotFound desc = could not find container \"a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe\": container with ID starting with a2495ec5bb0404ded76d9367f4961fe225c186093b05884790ec92faa9028ffe not found: ID does not exist" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.029313 4685 scope.go:117] "RemoveContainer" containerID="11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e" Jan 29 16:36:32 crc kubenswrapper[4685]: E0129 16:36:32.029508 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e\": container with ID starting with 11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e not found: ID does not exist" containerID="11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.029528 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e"} err="failed to get container status \"11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e\": rpc error: code = NotFound desc = could not find container \"11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e\": container with ID starting with 11c09ff856403c30ce1806c63bd1946d69a36f55b65460cc02218cd9adb0256e not found: ID does not exist" Jan 29 16:36:32 crc kubenswrapper[4685]: I0129 16:36:32.877356 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-t822z" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.148716 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f5k6j"] Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.148964 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.148985 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149000 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="extract-content" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149010 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="extract-content" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149037 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149048 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149059 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149066 4685 
state_mem.go:107] "Deleted CPUSet assignment" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149078 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="registry-server" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149086 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="registry-server" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149096 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa8250b5-b775-4d1a-b7ee-659aab2dc188" containerName="marketplace-operator" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149104 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa8250b5-b775-4d1a-b7ee-659aab2dc188" containerName="marketplace-operator" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149120 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149130 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149145 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="registry-server" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149155 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="registry-server" Jan 29 16:36:33 crc kubenswrapper[4685]: E0129 16:36:33.149167 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="extract-content" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149177 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="extract-content" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149362 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" containerName="registry-server" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149380 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149405 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa8250b5-b775-4d1a-b7ee-659aab2dc188" containerName="marketplace-operator" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149418 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" containerName="registry-server" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.149428 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" containerName="extract-utilities" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.150254 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.153092 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.161258 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f5k6j"] Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.271922 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c3daf83-3283-4698-b95a-b03aafe35302-utilities\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.271975 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c3daf83-3283-4698-b95a-b03aafe35302-catalog-content\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.272038 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbbsx\" (UniqueName: \"kubernetes.io/projected/3c3daf83-3283-4698-b95a-b03aafe35302-kube-api-access-kbbsx\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.346074 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tr74r"] Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.349750 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tr74r"] Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.349905 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.352481 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.372978 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-utilities\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.373059 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c3daf83-3283-4698-b95a-b03aafe35302-utilities\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.373112 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c3daf83-3283-4698-b95a-b03aafe35302-catalog-content\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.373166 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gvnd\" (UniqueName: \"kubernetes.io/projected/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-kube-api-access-7gvnd\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.373206 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-catalog-content\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.373256 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbbsx\" (UniqueName: \"kubernetes.io/projected/3c3daf83-3283-4698-b95a-b03aafe35302-kube-api-access-kbbsx\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.374024 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c3daf83-3283-4698-b95a-b03aafe35302-utilities\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.374272 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c3daf83-3283-4698-b95a-b03aafe35302-catalog-content\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.401195 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kbbsx\" (UniqueName: \"kubernetes.io/projected/3c3daf83-3283-4698-b95a-b03aafe35302-kube-api-access-kbbsx\") pod \"redhat-marketplace-f5k6j\" (UID: \"3c3daf83-3283-4698-b95a-b03aafe35302\") " pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.466819 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.474243 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-utilities\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.474360 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gvnd\" (UniqueName: \"kubernetes.io/projected/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-kube-api-access-7gvnd\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.474431 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-catalog-content\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.475057 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-catalog-content\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.475604 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-utilities\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.496260 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gvnd\" (UniqueName: \"kubernetes.io/projected/99ccb4b9-6fe9-4280-8765-a7c78ee9892a-kube-api-access-7gvnd\") pod \"redhat-operators-tr74r\" (UID: \"99ccb4b9-6fe9-4280-8765-a7c78ee9892a\") " pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.614644 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0602efa0-2eae-4bfa-a9fb-b12853c13228" path="/var/lib/kubelet/pods/0602efa0-2eae-4bfa-a9fb-b12853c13228/volumes" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.615788 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32247aeb-b5a0-4727-ae80-88aa6b9209f3" path="/var/lib/kubelet/pods/32247aeb-b5a0-4727-ae80-88aa6b9209f3/volumes" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.616929 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37a54549-4f2f-4b92-a1b1-99d418b04e93" 
path="/var/lib/kubelet/pods/37a54549-4f2f-4b92-a1b1-99d418b04e93/volumes" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.618922 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66f909c7-70dc-44c8-8fc3-22cddf5f9f76" path="/var/lib/kubelet/pods/66f909c7-70dc-44c8-8fc3-22cddf5f9f76/volumes" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.623603 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa8250b5-b775-4d1a-b7ee-659aab2dc188" path="/var/lib/kubelet/pods/fa8250b5-b775-4d1a-b7ee-659aab2dc188/volumes" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.676305 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.708913 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f5k6j"] Jan 29 16:36:33 crc kubenswrapper[4685]: W0129 16:36:33.716074 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c3daf83_3283_4698_b95a_b03aafe35302.slice/crio-32640051d2cd5f53102749d11efbbd0f0e37b6f000ba3af505d529743f84bd17 WatchSource:0}: Error finding container 32640051d2cd5f53102749d11efbbd0f0e37b6f000ba3af505d529743f84bd17: Status 404 returned error can't find the container with id 32640051d2cd5f53102749d11efbbd0f0e37b6f000ba3af505d529743f84bd17 Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.880502 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5k6j" event={"ID":"3c3daf83-3283-4698-b95a-b03aafe35302","Type":"ContainerStarted","Data":"eae3fba1037d6e78b5b3f2bfa13e7943c89e84b5a81026047d140e989db07319"} Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.880552 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5k6j" event={"ID":"3c3daf83-3283-4698-b95a-b03aafe35302","Type":"ContainerStarted","Data":"32640051d2cd5f53102749d11efbbd0f0e37b6f000ba3af505d529743f84bd17"} Jan 29 16:36:33 crc kubenswrapper[4685]: I0129 16:36:33.936810 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tr74r"] Jan 29 16:36:33 crc kubenswrapper[4685]: W0129 16:36:33.942915 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99ccb4b9_6fe9_4280_8765_a7c78ee9892a.slice/crio-6d4307003e545a35a3b50c852b60fe49b358e96a765f72e93c87f08152a3a384 WatchSource:0}: Error finding container 6d4307003e545a35a3b50c852b60fe49b358e96a765f72e93c87f08152a3a384: Status 404 returned error can't find the container with id 6d4307003e545a35a3b50c852b60fe49b358e96a765f72e93c87f08152a3a384 Jan 29 16:36:34 crc kubenswrapper[4685]: I0129 16:36:34.897170 4685 generic.go:334] "Generic (PLEG): container finished" podID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" containerID="ec888ee9f3fbc33b5d0fb0cd9bec0231b2b89b5dc85b7af3238ad72e387558db" exitCode=0 Jan 29 16:36:34 crc kubenswrapper[4685]: I0129 16:36:34.897343 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr74r" event={"ID":"99ccb4b9-6fe9-4280-8765-a7c78ee9892a","Type":"ContainerDied","Data":"ec888ee9f3fbc33b5d0fb0cd9bec0231b2b89b5dc85b7af3238ad72e387558db"} Jan 29 16:36:34 crc kubenswrapper[4685]: I0129 16:36:34.897385 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-tr74r" event={"ID":"99ccb4b9-6fe9-4280-8765-a7c78ee9892a","Type":"ContainerStarted","Data":"6d4307003e545a35a3b50c852b60fe49b358e96a765f72e93c87f08152a3a384"} Jan 29 16:36:34 crc kubenswrapper[4685]: I0129 16:36:34.901308 4685 generic.go:334] "Generic (PLEG): container finished" podID="3c3daf83-3283-4698-b95a-b03aafe35302" containerID="eae3fba1037d6e78b5b3f2bfa13e7943c89e84b5a81026047d140e989db07319" exitCode=0 Jan 29 16:36:34 crc kubenswrapper[4685]: I0129 16:36:34.901366 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5k6j" event={"ID":"3c3daf83-3283-4698-b95a-b03aafe35302","Type":"ContainerDied","Data":"eae3fba1037d6e78b5b3f2bfa13e7943c89e84b5a81026047d140e989db07319"} Jan 29 16:36:35 crc kubenswrapper[4685]: E0129 16:36:35.028533 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:36:35 crc kubenswrapper[4685]: E0129 16:36:35.028832 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:36:35 crc kubenswrapper[4685]: E0129 16:36:35.028906 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kbbsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-f5k6j_openshift-marketplace(3c3daf83-3283-4698-b95a-b03aafe35302): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:35 crc kubenswrapper[4685]: 
E0129 16:36:35.029065 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7gvnd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tr74r_openshift-marketplace(99ccb4b9-6fe9-4280-8765-a7c78ee9892a): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:35 crc kubenswrapper[4685]: E0129 16:36:35.030170 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-tr74r" podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:36:35 crc kubenswrapper[4685]: E0129 16:36:35.030227 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.545904 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b97g5"] Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.548565 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.551987 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.559545 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b97g5"] Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.706023 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beddcef1-54d6-4150-b62f-710112c84ec6-utilities\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.706080 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beddcef1-54d6-4150-b62f-710112c84ec6-catalog-content\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.706170 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj8ct\" (UniqueName: \"kubernetes.io/projected/beddcef1-54d6-4150-b62f-710112c84ec6-kube-api-access-tj8ct\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.746216 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hhqnq"] Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.747795 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.749567 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.757649 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hhqnq"] Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.807629 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beddcef1-54d6-4150-b62f-710112c84ec6-catalog-content\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.807717 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8g4z\" (UniqueName: \"kubernetes.io/projected/465498e2-f29b-47b9-a34d-b770b801f075-kube-api-access-d8g4z\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.807785 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj8ct\" (UniqueName: \"kubernetes.io/projected/beddcef1-54d6-4150-b62f-710112c84ec6-kube-api-access-tj8ct\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.807930 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/465498e2-f29b-47b9-a34d-b770b801f075-catalog-content\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.807980 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/465498e2-f29b-47b9-a34d-b770b801f075-utilities\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.808010 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beddcef1-54d6-4150-b62f-710112c84ec6-utilities\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.808189 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/beddcef1-54d6-4150-b62f-710112c84ec6-catalog-content\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.808772 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/beddcef1-54d6-4150-b62f-710112c84ec6-utilities\") pod \"community-operators-b97g5\" (UID: 
\"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.829331 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj8ct\" (UniqueName: \"kubernetes.io/projected/beddcef1-54d6-4150-b62f-710112c84ec6-kube-api-access-tj8ct\") pod \"community-operators-b97g5\" (UID: \"beddcef1-54d6-4150-b62f-710112c84ec6\") " pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.867369 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.908838 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/465498e2-f29b-47b9-a34d-b770b801f075-utilities\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.908887 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8g4z\" (UniqueName: \"kubernetes.io/projected/465498e2-f29b-47b9-a34d-b770b801f075-kube-api-access-d8g4z\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.909003 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/465498e2-f29b-47b9-a34d-b770b801f075-catalog-content\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.909242 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/465498e2-f29b-47b9-a34d-b770b801f075-utilities\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.909358 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/465498e2-f29b-47b9-a34d-b770b801f075-catalog-content\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:35 crc kubenswrapper[4685]: E0129 16:36:35.909432 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tr74r" podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:36:35 crc kubenswrapper[4685]: E0129 16:36:35.909858 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:36:35 crc kubenswrapper[4685]: I0129 16:36:35.930808 4685 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-d8g4z\" (UniqueName: \"kubernetes.io/projected/465498e2-f29b-47b9-a34d-b770b801f075-kube-api-access-d8g4z\") pod \"certified-operators-hhqnq\" (UID: \"465498e2-f29b-47b9-a34d-b770b801f075\") " pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.065940 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b97g5"] Jan 29 16:36:36 crc kubenswrapper[4685]: W0129 16:36:36.069536 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbeddcef1_54d6_4150_b62f_710112c84ec6.slice/crio-d9518997607361175c6c5b2b943614750e40cc55d448592126e9f2acc83d4a8c WatchSource:0}: Error finding container d9518997607361175c6c5b2b943614750e40cc55d448592126e9f2acc83d4a8c: Status 404 returned error can't find the container with id d9518997607361175c6c5b2b943614750e40cc55d448592126e9f2acc83d4a8c Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.070283 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.245426 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hhqnq"] Jan 29 16:36:36 crc kubenswrapper[4685]: W0129 16:36:36.289465 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod465498e2_f29b_47b9_a34d_b770b801f075.slice/crio-e31b4a3d617a51301e050d13169e2c7e6938bc23e8764776de21c3714cd1826c WatchSource:0}: Error finding container e31b4a3d617a51301e050d13169e2c7e6938bc23e8764776de21c3714cd1826c: Status 404 returned error can't find the container with id e31b4a3d617a51301e050d13169e2c7e6938bc23e8764776de21c3714cd1826c Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.916681 4685 generic.go:334] "Generic (PLEG): container finished" podID="465498e2-f29b-47b9-a34d-b770b801f075" containerID="d0edd8ea71942383489ff6551b86288a361ae1bf6db7dd28bd07524f9643192f" exitCode=0 Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.916784 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhqnq" event={"ID":"465498e2-f29b-47b9-a34d-b770b801f075","Type":"ContainerDied","Data":"d0edd8ea71942383489ff6551b86288a361ae1bf6db7dd28bd07524f9643192f"} Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.916823 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhqnq" event={"ID":"465498e2-f29b-47b9-a34d-b770b801f075","Type":"ContainerStarted","Data":"e31b4a3d617a51301e050d13169e2c7e6938bc23e8764776de21c3714cd1826c"} Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.918861 4685 generic.go:334] "Generic (PLEG): container finished" podID="beddcef1-54d6-4150-b62f-710112c84ec6" containerID="e69d9c5b58e2a1fb01445b73a6e45cb610a23e9c62ecda6184c2785e2c2cc01f" exitCode=0 Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.918915 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b97g5" event={"ID":"beddcef1-54d6-4150-b62f-710112c84ec6","Type":"ContainerDied","Data":"e69d9c5b58e2a1fb01445b73a6e45cb610a23e9c62ecda6184c2785e2c2cc01f"} Jan 29 16:36:36 crc kubenswrapper[4685]: I0129 16:36:36.918995 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b97g5" 
event={"ID":"beddcef1-54d6-4150-b62f-710112c84ec6","Type":"ContainerStarted","Data":"d9518997607361175c6c5b2b943614750e40cc55d448592126e9f2acc83d4a8c"} Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.056073 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.056252 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d8g4z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hhqnq_openshift-marketplace(465498e2-f29b-47b9-a34d-b770b801f075): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.057534 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.059246 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.059463 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tj8ct,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-b97g5_openshift-marketplace(beddcef1-54d6-4150-b62f-710112c84ec6): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.060694 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.929062 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:36:37 crc kubenswrapper[4685]: E0129 16:36:37.929107 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:36:47 crc kubenswrapper[4685]: E0129 16:36:47.750043 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:36:47 crc kubenswrapper[4685]: E0129 16:36:47.751092 4685 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kbbsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-f5k6j_openshift-marketplace(3c3daf83-3283-4698-b95a-b03aafe35302): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:47 crc kubenswrapper[4685]: E0129 16:36:47.752385 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:36:49 crc kubenswrapper[4685]: E0129 16:36:49.733682 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:36:49 crc kubenswrapper[4685]: E0129 16:36:49.733910 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tj8ct,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-b97g5_openshift-marketplace(beddcef1-54d6-4150-b62f-710112c84ec6): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:49 crc kubenswrapper[4685]: E0129 16:36:49.736004 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:36:49 crc kubenswrapper[4685]: E0129 16:36:49.736081 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:36:49 crc kubenswrapper[4685]: E0129 16:36:49.736250 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7gvnd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tr74r_openshift-marketplace(99ccb4b9-6fe9-4280-8765-a7c78ee9892a): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:49 crc kubenswrapper[4685]: E0129 16:36:49.737536 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-tr74r" podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:36:52 crc kubenswrapper[4685]: E0129 16:36:52.724634 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:36:52 crc kubenswrapper[4685]: E0129 16:36:52.725095 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d8g4z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hhqnq_openshift-marketplace(465498e2-f29b-47b9-a34d-b770b801f075): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:36:52 crc kubenswrapper[4685]: E0129 16:36:52.726672 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:36:59 crc kubenswrapper[4685]: E0129 16:36:59.604480 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:37:02 crc kubenswrapper[4685]: E0129 16:37:02.605647 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:37:04 crc kubenswrapper[4685]: E0129 16:37:04.603345 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tr74r" podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:37:06 crc kubenswrapper[4685]: E0129 16:37:06.604662 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" 
pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:37:13 crc kubenswrapper[4685]: E0129 16:37:13.779761 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 29 16:37:13 crc kubenswrapper[4685]: E0129 16:37:13.780860 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kbbsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-f5k6j_openshift-marketplace(3c3daf83-3283-4698-b95a-b03aafe35302): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:37:13 crc kubenswrapper[4685]: E0129 16:37:13.782124 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-marketplace-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:37:14 crc kubenswrapper[4685]: E0129 16:37:14.724071 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 29 16:37:14 crc kubenswrapper[4685]: E0129 16:37:14.724244 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tj8ct,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-b97g5_openshift-marketplace(beddcef1-54d6-4150-b62f-710112c84ec6): ErrImagePull: initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:37:14 crc kubenswrapper[4685]: E0129 16:37:14.725433 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/community-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:37:15 crc kubenswrapper[4685]: E0129 16:37:15.721734 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 29 16:37:15 crc kubenswrapper[4685]: E0129 16:37:15.721886 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7gvnd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tr74r_openshift-marketplace(99ccb4b9-6fe9-4280-8765-a7c78ee9892a): ErrImagePull: initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:37:15 crc kubenswrapper[4685]: E0129 16:37:15.723112 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/redhat-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/redhat-operators-tr74r" podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:37:17 crc kubenswrapper[4685]: E0129 16:37:17.724298 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 29 16:37:17 crc kubenswrapper[4685]: E0129 16:37:17.724485 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d8g4z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hhqnq_openshift-marketplace(465498e2-f29b-47b9-a34d-b770b801f075): ErrImagePull: initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)" logger="UnhandledError" Jan 29 16:37:17 crc kubenswrapper[4685]: E0129 16:37:17.725645 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"initializing source docker://registry.redhat.io/redhat/certified-operator-index:v4.18: Requesting bearer token: invalid status code from registry 403 (Forbidden)\"" pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:37:26 crc kubenswrapper[4685]: E0129 16:37:26.606248 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:37:27 crc kubenswrapper[4685]: E0129 16:37:27.604010 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tr74r" podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:37:28 crc kubenswrapper[4685]: E0129 16:37:28.604180 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:37:31 crc kubenswrapper[4685]: E0129 16:37:31.604301 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" 
pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:37:34 crc kubenswrapper[4685]: I0129 16:37:34.331052 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:37:34 crc kubenswrapper[4685]: I0129 16:37:34.331501 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:37:39 crc kubenswrapper[4685]: E0129 16:37:39.605699 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:37:39 crc kubenswrapper[4685]: E0129 16:37:39.605704 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:37:42 crc kubenswrapper[4685]: E0129 16:37:42.604709 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tr74r" podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:37:45 crc kubenswrapper[4685]: E0129 16:37:45.603823 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:37:50 crc kubenswrapper[4685]: E0129 16:37:50.604538 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-b97g5" podUID="beddcef1-54d6-4150-b62f-710112c84ec6" Jan 29 16:37:51 crc kubenswrapper[4685]: E0129 16:37:51.605012 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-f5k6j" podUID="3c3daf83-3283-4698-b95a-b03aafe35302" Jan 29 16:37:53 crc kubenswrapper[4685]: E0129 16:37:53.611423 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tr74r" 
podUID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" Jan 29 16:37:57 crc kubenswrapper[4685]: E0129 16:37:57.605121 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" Jan 29 16:38:04 crc kubenswrapper[4685]: I0129 16:38:04.331368 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:38:04 crc kubenswrapper[4685]: I0129 16:38:04.332303 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:38:04 crc kubenswrapper[4685]: I0129 16:38:04.456657 4685 generic.go:334] "Generic (PLEG): container finished" podID="3c3daf83-3283-4698-b95a-b03aafe35302" containerID="6a0db5c4581d248a136111d0393815fda8dfcc84677e62ebe9ed5c6fb957dcf2" exitCode=0 Jan 29 16:38:04 crc kubenswrapper[4685]: I0129 16:38:04.456712 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5k6j" event={"ID":"3c3daf83-3283-4698-b95a-b03aafe35302","Type":"ContainerDied","Data":"6a0db5c4581d248a136111d0393815fda8dfcc84677e62ebe9ed5c6fb957dcf2"} Jan 29 16:38:06 crc kubenswrapper[4685]: I0129 16:38:06.469169 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f5k6j" event={"ID":"3c3daf83-3283-4698-b95a-b03aafe35302","Type":"ContainerStarted","Data":"0863339e853851cd480a029c1d64c3222791dc5525bd6ba4dc49338760a0754a"} Jan 29 16:38:06 crc kubenswrapper[4685]: I0129 16:38:06.495603 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f5k6j" podStartSLOduration=2.715808216 podStartE2EDuration="1m33.49558705s" podCreationTimestamp="2026-01-29 16:36:33 +0000 UTC" firstStartedPulling="2026-01-29 16:36:34.903792134 +0000 UTC m=+622.101148406" lastFinishedPulling="2026-01-29 16:38:05.683570938 +0000 UTC m=+712.880927240" observedRunningTime="2026-01-29 16:38:06.490290396 +0000 UTC m=+713.687646678" watchObservedRunningTime="2026-01-29 16:38:06.49558705 +0000 UTC m=+713.692943312" Jan 29 16:38:10 crc kubenswrapper[4685]: I0129 16:38:10.504135 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b97g5" event={"ID":"beddcef1-54d6-4150-b62f-710112c84ec6","Type":"ContainerStarted","Data":"600bc4d134b559221d4e1c27158006cb63d51cb5e7ea5549acf419e0b824c696"} Jan 29 16:38:10 crc kubenswrapper[4685]: I0129 16:38:10.506532 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr74r" event={"ID":"99ccb4b9-6fe9-4280-8765-a7c78ee9892a","Type":"ContainerStarted","Data":"0abef253484ee526b5e6ae4eac29065fc19368aa7f45bcc8c807061c17c7ad93"} Jan 29 16:38:11 crc kubenswrapper[4685]: I0129 16:38:11.516428 4685 generic.go:334] "Generic (PLEG): container finished" podID="beddcef1-54d6-4150-b62f-710112c84ec6" 
containerID="600bc4d134b559221d4e1c27158006cb63d51cb5e7ea5549acf419e0b824c696" exitCode=0 Jan 29 16:38:11 crc kubenswrapper[4685]: I0129 16:38:11.516477 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b97g5" event={"ID":"beddcef1-54d6-4150-b62f-710112c84ec6","Type":"ContainerDied","Data":"600bc4d134b559221d4e1c27158006cb63d51cb5e7ea5549acf419e0b824c696"} Jan 29 16:38:12 crc kubenswrapper[4685]: I0129 16:38:12.524714 4685 generic.go:334] "Generic (PLEG): container finished" podID="99ccb4b9-6fe9-4280-8765-a7c78ee9892a" containerID="0abef253484ee526b5e6ae4eac29065fc19368aa7f45bcc8c807061c17c7ad93" exitCode=0 Jan 29 16:38:12 crc kubenswrapper[4685]: I0129 16:38:12.524889 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr74r" event={"ID":"99ccb4b9-6fe9-4280-8765-a7c78ee9892a","Type":"ContainerDied","Data":"0abef253484ee526b5e6ae4eac29065fc19368aa7f45bcc8c807061c17c7ad93"} Jan 29 16:38:13 crc kubenswrapper[4685]: I0129 16:38:13.467872 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:38:13 crc kubenswrapper[4685]: I0129 16:38:13.468273 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:38:13 crc kubenswrapper[4685]: I0129 16:38:13.534322 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b97g5" event={"ID":"beddcef1-54d6-4150-b62f-710112c84ec6","Type":"ContainerStarted","Data":"846efb2c875ed4a04c807c349125971a148f1f048a1ebdde169f1f15b45ddcab"} Jan 29 16:38:13 crc kubenswrapper[4685]: I0129 16:38:13.540236 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:38:13 crc kubenswrapper[4685]: I0129 16:38:13.552690 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b97g5" podStartSLOduration=2.424724597 podStartE2EDuration="1m38.552669559s" podCreationTimestamp="2026-01-29 16:36:35 +0000 UTC" firstStartedPulling="2026-01-29 16:36:36.921593229 +0000 UTC m=+624.118949521" lastFinishedPulling="2026-01-29 16:38:13.049538221 +0000 UTC m=+720.246894483" observedRunningTime="2026-01-29 16:38:13.550039828 +0000 UTC m=+720.747396110" watchObservedRunningTime="2026-01-29 16:38:13.552669559 +0000 UTC m=+720.750025821" Jan 29 16:38:14 crc kubenswrapper[4685]: I0129 16:38:14.541548 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr74r" event={"ID":"99ccb4b9-6fe9-4280-8765-a7c78ee9892a","Type":"ContainerStarted","Data":"7ffd35ac6977321112ae69b32ce038eab6758adc69cd5bedf9be3e3996c72bb6"} Jan 29 16:38:14 crc kubenswrapper[4685]: I0129 16:38:14.585820 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f5k6j" Jan 29 16:38:14 crc kubenswrapper[4685]: I0129 16:38:14.607183 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tr74r" podStartSLOduration=2.93127548 podStartE2EDuration="1m41.607166558s" podCreationTimestamp="2026-01-29 16:36:33 +0000 UTC" firstStartedPulling="2026-01-29 16:36:34.90079181 +0000 UTC m=+622.098148082" lastFinishedPulling="2026-01-29 16:38:13.576682898 +0000 UTC m=+720.774039160" observedRunningTime="2026-01-29 16:38:14.56176524 +0000 UTC 
m=+721.759121512" watchObservedRunningTime="2026-01-29 16:38:14.607166558 +0000 UTC m=+721.804522830" Jan 29 16:38:15 crc kubenswrapper[4685]: I0129 16:38:15.868091 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:38:15 crc kubenswrapper[4685]: I0129 16:38:15.868494 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:38:15 crc kubenswrapper[4685]: I0129 16:38:15.931760 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:38:17 crc kubenswrapper[4685]: I0129 16:38:17.569567 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhqnq" event={"ID":"465498e2-f29b-47b9-a34d-b770b801f075","Type":"ContainerStarted","Data":"7fb8caae2f925d388765264c8c74182a37169329ed6f3220685a6d829cb3cebd"} Jan 29 16:38:20 crc kubenswrapper[4685]: I0129 16:38:20.587422 4685 generic.go:334] "Generic (PLEG): container finished" podID="465498e2-f29b-47b9-a34d-b770b801f075" containerID="7fb8caae2f925d388765264c8c74182a37169329ed6f3220685a6d829cb3cebd" exitCode=0 Jan 29 16:38:20 crc kubenswrapper[4685]: I0129 16:38:20.587480 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhqnq" event={"ID":"465498e2-f29b-47b9-a34d-b770b801f075","Type":"ContainerDied","Data":"7fb8caae2f925d388765264c8c74182a37169329ed6f3220685a6d829cb3cebd"} Jan 29 16:38:23 crc kubenswrapper[4685]: I0129 16:38:23.677544 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:38:23 crc kubenswrapper[4685]: I0129 16:38:23.677879 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:38:23 crc kubenswrapper[4685]: I0129 16:38:23.718814 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:38:24 crc kubenswrapper[4685]: I0129 16:38:24.646805 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tr74r" Jan 29 16:38:25 crc kubenswrapper[4685]: I0129 16:38:25.612015 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhqnq" event={"ID":"465498e2-f29b-47b9-a34d-b770b801f075","Type":"ContainerStarted","Data":"dfbbfdc19d1881a9099f2be6d4bce5ab2077ec5f8dca5c9ce87ed41069940276"} Jan 29 16:38:25 crc kubenswrapper[4685]: I0129 16:38:25.627667 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hhqnq" podStartSLOduration=2.728027321 podStartE2EDuration="1m50.627651769s" podCreationTimestamp="2026-01-29 16:36:35 +0000 UTC" firstStartedPulling="2026-01-29 16:36:36.921244701 +0000 UTC m=+624.118600973" lastFinishedPulling="2026-01-29 16:38:24.820869139 +0000 UTC m=+732.018225421" observedRunningTime="2026-01-29 16:38:25.626127128 +0000 UTC m=+732.823483400" watchObservedRunningTime="2026-01-29 16:38:25.627651769 +0000 UTC m=+732.825008031" Jan 29 16:38:25 crc kubenswrapper[4685]: I0129 16:38:25.911951 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b97g5" Jan 29 16:38:26 crc kubenswrapper[4685]: I0129 16:38:26.071522 4685 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:38:26 crc kubenswrapper[4685]: I0129 16:38:26.071580 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:38:27 crc kubenswrapper[4685]: I0129 16:38:27.117795 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-hhqnq" podUID="465498e2-f29b-47b9-a34d-b770b801f075" containerName="registry-server" probeResult="failure" output=< Jan 29 16:38:27 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Jan 29 16:38:27 crc kubenswrapper[4685]: > Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.331382 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.332021 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.332102 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.333230 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0631ee68681351da7c993ddc9a5145cf505e96a07fb73bc3744fa991be1ba341"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.333304 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://0631ee68681351da7c993ddc9a5145cf505e96a07fb73bc3744fa991be1ba341" gracePeriod=600 Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.661773 4685 generic.go:334] "Generic (PLEG): container finished" podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="0631ee68681351da7c993ddc9a5145cf505e96a07fb73bc3744fa991be1ba341" exitCode=0 Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.661849 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"0631ee68681351da7c993ddc9a5145cf505e96a07fb73bc3744fa991be1ba341"} Jan 29 16:38:34 crc kubenswrapper[4685]: I0129 16:38:34.661899 4685 scope.go:117] "RemoveContainer" containerID="a0419951284be5ffe9ffe438c38e991f2af3be8c6f4e6dded1d7d5d23623be54" Jan 29 16:38:35 crc kubenswrapper[4685]: I0129 16:38:35.668822 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" 
event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"f660a642750ec42bf7c2e5c27a23b363a3576c5f9ac5d6eead16533b53b29349"} Jan 29 16:38:36 crc kubenswrapper[4685]: I0129 16:38:36.113849 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:38:36 crc kubenswrapper[4685]: I0129 16:38:36.153783 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hhqnq" Jan 29 16:38:49 crc kubenswrapper[4685]: I0129 16:38:49.183147 4685 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.595976 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kqhbk"] Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.598624 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.614953 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kqhbk"] Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.765769 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-catalog-content\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.765907 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqzjz\" (UniqueName: \"kubernetes.io/projected/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-kube-api-access-qqzjz\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.766051 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-utilities\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.867517 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-catalog-content\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.867594 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqzjz\" (UniqueName: \"kubernetes.io/projected/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-kube-api-access-qqzjz\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.867639 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-utilities\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.868156 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-utilities\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.868175 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-catalog-content\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.899806 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqzjz\" (UniqueName: \"kubernetes.io/projected/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-kube-api-access-qqzjz\") pod \"certified-operators-kqhbk\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:11 crc kubenswrapper[4685]: I0129 16:39:11.925860 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:12 crc kubenswrapper[4685]: I0129 16:39:12.120176 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kqhbk"] Jan 29 16:39:12 crc kubenswrapper[4685]: I0129 16:39:12.909075 4685 generic.go:334] "Generic (PLEG): container finished" podID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerID="337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43" exitCode=0 Jan 29 16:39:12 crc kubenswrapper[4685]: I0129 16:39:12.909156 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kqhbk" event={"ID":"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd","Type":"ContainerDied","Data":"337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43"} Jan 29 16:39:12 crc kubenswrapper[4685]: I0129 16:39:12.910609 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kqhbk" event={"ID":"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd","Type":"ContainerStarted","Data":"b048219846e36199d5c2fd84e1b9f06bb173051c47e6d385c755c6f254e1d7fb"} Jan 29 16:39:12 crc kubenswrapper[4685]: I0129 16:39:12.910571 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 29 16:39:13 crc kubenswrapper[4685]: I0129 16:39:13.920310 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kqhbk" event={"ID":"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd","Type":"ContainerStarted","Data":"1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799"} Jan 29 16:39:14 crc kubenswrapper[4685]: I0129 16:39:14.928854 4685 generic.go:334] "Generic (PLEG): container finished" podID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerID="1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799" exitCode=0 Jan 29 16:39:14 crc kubenswrapper[4685]: I0129 16:39:14.928909 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-kqhbk" event={"ID":"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd","Type":"ContainerDied","Data":"1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799"} Jan 29 16:39:15 crc kubenswrapper[4685]: I0129 16:39:15.934729 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kqhbk" event={"ID":"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd","Type":"ContainerStarted","Data":"0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5"} Jan 29 16:39:15 crc kubenswrapper[4685]: I0129 16:39:15.954975 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kqhbk" podStartSLOduration=2.5080279 podStartE2EDuration="4.954952908s" podCreationTimestamp="2026-01-29 16:39:11 +0000 UTC" firstStartedPulling="2026-01-29 16:39:12.910369026 +0000 UTC m=+780.107725288" lastFinishedPulling="2026-01-29 16:39:15.357293994 +0000 UTC m=+782.554650296" observedRunningTime="2026-01-29 16:39:15.949329736 +0000 UTC m=+783.146685998" watchObservedRunningTime="2026-01-29 16:39:15.954952908 +0000 UTC m=+783.152309190" Jan 29 16:39:21 crc kubenswrapper[4685]: I0129 16:39:21.926267 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:21 crc kubenswrapper[4685]: I0129 16:39:21.926701 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:21 crc kubenswrapper[4685]: I0129 16:39:21.999655 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:23 crc kubenswrapper[4685]: I0129 16:39:23.035438 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:23 crc kubenswrapper[4685]: I0129 16:39:23.097077 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kqhbk"] Jan 29 16:39:24 crc kubenswrapper[4685]: I0129 16:39:24.991889 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kqhbk" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="registry-server" containerID="cri-o://0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5" gracePeriod=2 Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.442011 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.476144 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-catalog-content\") pod \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.476200 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-utilities\") pod \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.476232 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqzjz\" (UniqueName: \"kubernetes.io/projected/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-kube-api-access-qqzjz\") pod \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\" (UID: \"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd\") " Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.477059 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-utilities" (OuterVolumeSpecName: "utilities") pod "5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" (UID: "5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.483165 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-kube-api-access-qqzjz" (OuterVolumeSpecName: "kube-api-access-qqzjz") pod "5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" (UID: "5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd"). InnerVolumeSpecName "kube-api-access-qqzjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.522957 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" (UID: "5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.577162 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqzjz\" (UniqueName: \"kubernetes.io/projected/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-kube-api-access-qqzjz\") on node \"crc\" DevicePath \"\"" Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.577203 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:39:26 crc kubenswrapper[4685]: I0129 16:39:26.577214 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.004118 4685 generic.go:334] "Generic (PLEG): container finished" podID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerID="0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5" exitCode=0 Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.004161 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kqhbk" event={"ID":"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd","Type":"ContainerDied","Data":"0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5"} Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.004188 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kqhbk" event={"ID":"5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd","Type":"ContainerDied","Data":"b048219846e36199d5c2fd84e1b9f06bb173051c47e6d385c755c6f254e1d7fb"} Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.004219 4685 scope.go:117] "RemoveContainer" containerID="0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.004273 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kqhbk" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.026911 4685 scope.go:117] "RemoveContainer" containerID="1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.038265 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kqhbk"] Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.044693 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kqhbk"] Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.062678 4685 scope.go:117] "RemoveContainer" containerID="337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.080196 4685 scope.go:117] "RemoveContainer" containerID="0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5" Jan 29 16:39:27 crc kubenswrapper[4685]: E0129 16:39:27.080631 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5\": container with ID starting with 0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5 not found: ID does not exist" containerID="0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.080673 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5"} err="failed to get container status \"0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5\": rpc error: code = NotFound desc = could not find container \"0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5\": container with ID starting with 0574fa77825311c9e0fa51fa3962f4c716cb426449a178c04c21cbdcc1d886f5 not found: ID does not exist" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.080700 4685 scope.go:117] "RemoveContainer" containerID="1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799" Jan 29 16:39:27 crc kubenswrapper[4685]: E0129 16:39:27.080967 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799\": container with ID starting with 1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799 not found: ID does not exist" containerID="1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.080988 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799"} err="failed to get container status \"1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799\": rpc error: code = NotFound desc = could not find container \"1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799\": container with ID starting with 1627d2c20b8dda63761eea9d9cdeaede02a95bc5ac80f434e59a0d9c25699799 not found: ID does not exist" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.081003 4685 scope.go:117] "RemoveContainer" containerID="337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43" Jan 29 16:39:27 crc kubenswrapper[4685]: E0129 16:39:27.081169 4685 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43\": container with ID starting with 337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43 not found: ID does not exist" containerID="337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.081185 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43"} err="failed to get container status \"337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43\": rpc error: code = NotFound desc = could not find container \"337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43\": container with ID starting with 337bc8b227fd608715dc5e1b77fb2f08712bf883a71d5d57624d52cd36b8ad43 not found: ID does not exist" Jan 29 16:39:27 crc kubenswrapper[4685]: I0129 16:39:27.616075 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" path="/var/lib/kubelet/pods/5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd/volumes" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.808041 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ldjcn"] Jan 29 16:39:42 crc kubenswrapper[4685]: E0129 16:39:42.808899 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="registry-server" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.808915 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="registry-server" Jan 29 16:39:42 crc kubenswrapper[4685]: E0129 16:39:42.808935 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="extract-utilities" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.808943 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="extract-utilities" Jan 29 16:39:42 crc kubenswrapper[4685]: E0129 16:39:42.808967 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="extract-content" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.808980 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="extract-content" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.809107 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="5994c3c8-ffe3-4736-a8a5-93a9c30eb0bd" containerName="registry-server" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.810038 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.818631 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ldjcn"] Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.873900 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-catalog-content\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.874307 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-utilities\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.874352 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrq8l\" (UniqueName: \"kubernetes.io/projected/b2ea6160-e4e3-46b1-a762-0717bbeffc23-kube-api-access-lrq8l\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.975131 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-catalog-content\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.975190 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-utilities\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.975236 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrq8l\" (UniqueName: \"kubernetes.io/projected/b2ea6160-e4e3-46b1-a762-0717bbeffc23-kube-api-access-lrq8l\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.975747 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-catalog-content\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.975851 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-utilities\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:42 crc kubenswrapper[4685]: I0129 16:39:42.993460 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lrq8l\" (UniqueName: \"kubernetes.io/projected/b2ea6160-e4e3-46b1-a762-0717bbeffc23-kube-api-access-lrq8l\") pod \"community-operators-ldjcn\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:43 crc kubenswrapper[4685]: I0129 16:39:43.130519 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:43 crc kubenswrapper[4685]: I0129 16:39:43.365291 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ldjcn"] Jan 29 16:39:44 crc kubenswrapper[4685]: I0129 16:39:44.111054 4685 generic.go:334] "Generic (PLEG): container finished" podID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerID="4edc5abc025b2f772780ec80332e4a0fa5941cc90c6c3c93f114a465d3dc42e5" exitCode=0 Jan 29 16:39:44 crc kubenswrapper[4685]: I0129 16:39:44.111120 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldjcn" event={"ID":"b2ea6160-e4e3-46b1-a762-0717bbeffc23","Type":"ContainerDied","Data":"4edc5abc025b2f772780ec80332e4a0fa5941cc90c6c3c93f114a465d3dc42e5"} Jan 29 16:39:44 crc kubenswrapper[4685]: I0129 16:39:44.111376 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldjcn" event={"ID":"b2ea6160-e4e3-46b1-a762-0717bbeffc23","Type":"ContainerStarted","Data":"4791de2aa4677a8b377476bbf2811cd708827bf740dbfeb7471f0fe86fa786ab"} Jan 29 16:39:45 crc kubenswrapper[4685]: I0129 16:39:45.121512 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldjcn" event={"ID":"b2ea6160-e4e3-46b1-a762-0717bbeffc23","Type":"ContainerStarted","Data":"52a8b5f4cd437052032f7ba7931b93e25213831e20d2f9609689227892742c5a"} Jan 29 16:39:46 crc kubenswrapper[4685]: I0129 16:39:46.128071 4685 generic.go:334] "Generic (PLEG): container finished" podID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerID="52a8b5f4cd437052032f7ba7931b93e25213831e20d2f9609689227892742c5a" exitCode=0 Jan 29 16:39:46 crc kubenswrapper[4685]: I0129 16:39:46.128115 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldjcn" event={"ID":"b2ea6160-e4e3-46b1-a762-0717bbeffc23","Type":"ContainerDied","Data":"52a8b5f4cd437052032f7ba7931b93e25213831e20d2f9609689227892742c5a"} Jan 29 16:39:47 crc kubenswrapper[4685]: I0129 16:39:47.144264 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldjcn" event={"ID":"b2ea6160-e4e3-46b1-a762-0717bbeffc23","Type":"ContainerStarted","Data":"eb1fbbd713729e9ab5cb25f5d4a1c71e6a4297eaab0a7bed0f21233adf9f63c4"} Jan 29 16:39:47 crc kubenswrapper[4685]: I0129 16:39:47.165578 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ldjcn" podStartSLOduration=2.76023805 podStartE2EDuration="5.165545163s" podCreationTimestamp="2026-01-29 16:39:42 +0000 UTC" firstStartedPulling="2026-01-29 16:39:44.11464025 +0000 UTC m=+811.311996512" lastFinishedPulling="2026-01-29 16:39:46.519947363 +0000 UTC m=+813.717303625" observedRunningTime="2026-01-29 16:39:47.162811249 +0000 UTC m=+814.360167571" watchObservedRunningTime="2026-01-29 16:39:47.165545163 +0000 UTC m=+814.362901445" Jan 29 16:39:53 crc kubenswrapper[4685]: I0129 16:39:53.131239 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:53 crc kubenswrapper[4685]: I0129 16:39:53.132553 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:53 crc kubenswrapper[4685]: I0129 16:39:53.193748 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:53 crc kubenswrapper[4685]: I0129 16:39:53.441161 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:53 crc kubenswrapper[4685]: I0129 16:39:53.490018 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ldjcn"] Jan 29 16:39:55 crc kubenswrapper[4685]: I0129 16:39:55.434625 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ldjcn" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="registry-server" containerID="cri-o://eb1fbbd713729e9ab5cb25f5d4a1c71e6a4297eaab0a7bed0f21233adf9f63c4" gracePeriod=2 Jan 29 16:39:56 crc kubenswrapper[4685]: I0129 16:39:56.442074 4685 generic.go:334] "Generic (PLEG): container finished" podID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerID="eb1fbbd713729e9ab5cb25f5d4a1c71e6a4297eaab0a7bed0f21233adf9f63c4" exitCode=0 Jan 29 16:39:56 crc kubenswrapper[4685]: I0129 16:39:56.442104 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldjcn" event={"ID":"b2ea6160-e4e3-46b1-a762-0717bbeffc23","Type":"ContainerDied","Data":"eb1fbbd713729e9ab5cb25f5d4a1c71e6a4297eaab0a7bed0f21233adf9f63c4"} Jan 29 16:39:56 crc kubenswrapper[4685]: I0129 16:39:56.998143 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.168899 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrq8l\" (UniqueName: \"kubernetes.io/projected/b2ea6160-e4e3-46b1-a762-0717bbeffc23-kube-api-access-lrq8l\") pod \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.169758 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-utilities\") pod \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.169820 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-catalog-content\") pod \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\" (UID: \"b2ea6160-e4e3-46b1-a762-0717bbeffc23\") " Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.170832 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-utilities" (OuterVolumeSpecName: "utilities") pod "b2ea6160-e4e3-46b1-a762-0717bbeffc23" (UID: "b2ea6160-e4e3-46b1-a762-0717bbeffc23"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.177770 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ea6160-e4e3-46b1-a762-0717bbeffc23-kube-api-access-lrq8l" (OuterVolumeSpecName: "kube-api-access-lrq8l") pod "b2ea6160-e4e3-46b1-a762-0717bbeffc23" (UID: "b2ea6160-e4e3-46b1-a762-0717bbeffc23"). InnerVolumeSpecName "kube-api-access-lrq8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.271280 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.271344 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrq8l\" (UniqueName: \"kubernetes.io/projected/b2ea6160-e4e3-46b1-a762-0717bbeffc23-kube-api-access-lrq8l\") on node \"crc\" DevicePath \"\"" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.451163 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldjcn" event={"ID":"b2ea6160-e4e3-46b1-a762-0717bbeffc23","Type":"ContainerDied","Data":"4791de2aa4677a8b377476bbf2811cd708827bf740dbfeb7471f0fe86fa786ab"} Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.451226 4685 scope.go:117] "RemoveContainer" containerID="eb1fbbd713729e9ab5cb25f5d4a1c71e6a4297eaab0a7bed0f21233adf9f63c4" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.451238 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldjcn" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.474667 4685 scope.go:117] "RemoveContainer" containerID="52a8b5f4cd437052032f7ba7931b93e25213831e20d2f9609689227892742c5a" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.489203 4685 scope.go:117] "RemoveContainer" containerID="4edc5abc025b2f772780ec80332e4a0fa5941cc90c6c3c93f114a465d3dc42e5" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.686700 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2ea6160-e4e3-46b1-a762-0717bbeffc23" (UID: "b2ea6160-e4e3-46b1-a762-0717bbeffc23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.777315 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2ea6160-e4e3-46b1-a762-0717bbeffc23-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.779611 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ldjcn"] Jan 29 16:39:57 crc kubenswrapper[4685]: I0129 16:39:57.783031 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ldjcn"] Jan 29 16:39:59 crc kubenswrapper[4685]: I0129 16:39:59.608826 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" path="/var/lib/kubelet/pods/b2ea6160-e4e3-46b1-a762-0717bbeffc23/volumes" Jan 29 16:41:04 crc kubenswrapper[4685]: I0129 16:41:04.330702 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:41:04 crc kubenswrapper[4685]: I0129 16:41:04.332192 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:41:34 crc kubenswrapper[4685]: I0129 16:41:34.330764 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:41:34 crc kubenswrapper[4685]: I0129 16:41:34.331813 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.217569 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cmrrx"] Jan 29 16:41:40 crc kubenswrapper[4685]: E0129 16:41:40.218424 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="registry-server" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.218438 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="registry-server" Jan 29 16:41:40 crc kubenswrapper[4685]: E0129 16:41:40.218455 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="extract-utilities" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.218464 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="extract-utilities" Jan 29 16:41:40 crc kubenswrapper[4685]: E0129 16:41:40.218483 4685 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="extract-content" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.218491 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="extract-content" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.218616 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ea6160-e4e3-46b1-a762-0717bbeffc23" containerName="registry-server" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.219489 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.237926 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cmrrx"] Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.343697 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-utilities\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.343794 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6hqs\" (UniqueName: \"kubernetes.io/projected/e069be4a-5111-4620-aada-998486c4df05-kube-api-access-j6hqs\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.343824 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-catalog-content\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.445246 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-utilities\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.445349 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6hqs\" (UniqueName: \"kubernetes.io/projected/e069be4a-5111-4620-aada-998486c4df05-kube-api-access-j6hqs\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.445406 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-catalog-content\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.445876 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-catalog-content\") pod \"redhat-marketplace-cmrrx\" (UID: 
\"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.445966 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-utilities\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.473423 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6hqs\" (UniqueName: \"kubernetes.io/projected/e069be4a-5111-4620-aada-998486c4df05-kube-api-access-j6hqs\") pod \"redhat-marketplace-cmrrx\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.536129 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:40 crc kubenswrapper[4685]: I0129 16:41:40.775405 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cmrrx"] Jan 29 16:41:41 crc kubenswrapper[4685]: I0129 16:41:41.063584 4685 generic.go:334] "Generic (PLEG): container finished" podID="e069be4a-5111-4620-aada-998486c4df05" containerID="694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1" exitCode=0 Jan 29 16:41:41 crc kubenswrapper[4685]: I0129 16:41:41.063663 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cmrrx" event={"ID":"e069be4a-5111-4620-aada-998486c4df05","Type":"ContainerDied","Data":"694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1"} Jan 29 16:41:41 crc kubenswrapper[4685]: I0129 16:41:41.063890 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cmrrx" event={"ID":"e069be4a-5111-4620-aada-998486c4df05","Type":"ContainerStarted","Data":"902e62e758a0e153e937373865972224d940dfeec8d47cee116cd75fa7e906b9"} Jan 29 16:41:42 crc kubenswrapper[4685]: I0129 16:41:42.071168 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cmrrx" event={"ID":"e069be4a-5111-4620-aada-998486c4df05","Type":"ContainerStarted","Data":"d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6"} Jan 29 16:41:43 crc kubenswrapper[4685]: I0129 16:41:43.080329 4685 generic.go:334] "Generic (PLEG): container finished" podID="e069be4a-5111-4620-aada-998486c4df05" containerID="d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6" exitCode=0 Jan 29 16:41:43 crc kubenswrapper[4685]: I0129 16:41:43.080445 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cmrrx" event={"ID":"e069be4a-5111-4620-aada-998486c4df05","Type":"ContainerDied","Data":"d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6"} Jan 29 16:41:44 crc kubenswrapper[4685]: I0129 16:41:44.087589 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cmrrx" event={"ID":"e069be4a-5111-4620-aada-998486c4df05","Type":"ContainerStarted","Data":"99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e"} Jan 29 16:41:44 crc kubenswrapper[4685]: I0129 16:41:44.109983 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cmrrx" 
podStartSLOduration=1.666347295 podStartE2EDuration="4.109953427s" podCreationTimestamp="2026-01-29 16:41:40 +0000 UTC" firstStartedPulling="2026-01-29 16:41:41.064955473 +0000 UTC m=+928.262311735" lastFinishedPulling="2026-01-29 16:41:43.508561595 +0000 UTC m=+930.705917867" observedRunningTime="2026-01-29 16:41:44.104157608 +0000 UTC m=+931.301513890" watchObservedRunningTime="2026-01-29 16:41:44.109953427 +0000 UTC m=+931.307309699" Jan 29 16:41:50 crc kubenswrapper[4685]: I0129 16:41:50.536678 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:50 crc kubenswrapper[4685]: I0129 16:41:50.537436 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:50 crc kubenswrapper[4685]: I0129 16:41:50.609935 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:51 crc kubenswrapper[4685]: I0129 16:41:51.194555 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:51 crc kubenswrapper[4685]: I0129 16:41:51.255952 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cmrrx"] Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.140791 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cmrrx" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="registry-server" containerID="cri-o://99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e" gracePeriod=2 Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.509146 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.626196 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-catalog-content\") pod \"e069be4a-5111-4620-aada-998486c4df05\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.626238 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-utilities\") pod \"e069be4a-5111-4620-aada-998486c4df05\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.626355 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6hqs\" (UniqueName: \"kubernetes.io/projected/e069be4a-5111-4620-aada-998486c4df05-kube-api-access-j6hqs\") pod \"e069be4a-5111-4620-aada-998486c4df05\" (UID: \"e069be4a-5111-4620-aada-998486c4df05\") " Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.627605 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-utilities" (OuterVolumeSpecName: "utilities") pod "e069be4a-5111-4620-aada-998486c4df05" (UID: "e069be4a-5111-4620-aada-998486c4df05"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.631836 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e069be4a-5111-4620-aada-998486c4df05-kube-api-access-j6hqs" (OuterVolumeSpecName: "kube-api-access-j6hqs") pod "e069be4a-5111-4620-aada-998486c4df05" (UID: "e069be4a-5111-4620-aada-998486c4df05"). InnerVolumeSpecName "kube-api-access-j6hqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.646216 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e069be4a-5111-4620-aada-998486c4df05" (UID: "e069be4a-5111-4620-aada-998486c4df05"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.728543 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6hqs\" (UniqueName: \"kubernetes.io/projected/e069be4a-5111-4620-aada-998486c4df05-kube-api-access-j6hqs\") on node \"crc\" DevicePath \"\"" Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.729100 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:41:53 crc kubenswrapper[4685]: I0129 16:41:53.729158 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e069be4a-5111-4620-aada-998486c4df05-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.150953 4685 generic.go:334] "Generic (PLEG): container finished" podID="e069be4a-5111-4620-aada-998486c4df05" containerID="99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e" exitCode=0 Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.151018 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cmrrx" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.151025 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cmrrx" event={"ID":"e069be4a-5111-4620-aada-998486c4df05","Type":"ContainerDied","Data":"99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e"} Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.151231 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cmrrx" event={"ID":"e069be4a-5111-4620-aada-998486c4df05","Type":"ContainerDied","Data":"902e62e758a0e153e937373865972224d940dfeec8d47cee116cd75fa7e906b9"} Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.151297 4685 scope.go:117] "RemoveContainer" containerID="99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.175540 4685 scope.go:117] "RemoveContainer" containerID="d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.187943 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cmrrx"] Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.192336 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cmrrx"] Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.205119 4685 scope.go:117] "RemoveContainer" containerID="694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.222748 4685 scope.go:117] "RemoveContainer" containerID="99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e" Jan 29 16:41:54 crc kubenswrapper[4685]: E0129 16:41:54.223314 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e\": container with ID starting with 99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e not found: ID does not exist" containerID="99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.223348 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e"} err="failed to get container status \"99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e\": rpc error: code = NotFound desc = could not find container \"99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e\": container with ID starting with 99baa6ff2ea9ba2df4c6d5adefc16bb67a52212d20294c0cb1d72a50cef9771e not found: ID does not exist" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.223368 4685 scope.go:117] "RemoveContainer" containerID="d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6" Jan 29 16:41:54 crc kubenswrapper[4685]: E0129 16:41:54.223726 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6\": container with ID starting with d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6 not found: ID does not exist" containerID="d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.223756 4685 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6"} err="failed to get container status \"d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6\": rpc error: code = NotFound desc = could not find container \"d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6\": container with ID starting with d4fed51e359d672adfa7c44fa5c3d3f36b72c0f22c5f2309d76af3c9923515b6 not found: ID does not exist" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.223777 4685 scope.go:117] "RemoveContainer" containerID="694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1" Jan 29 16:41:54 crc kubenswrapper[4685]: E0129 16:41:54.224132 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1\": container with ID starting with 694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1 not found: ID does not exist" containerID="694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1" Jan 29 16:41:54 crc kubenswrapper[4685]: I0129 16:41:54.224163 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1"} err="failed to get container status \"694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1\": rpc error: code = NotFound desc = could not find container \"694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1\": container with ID starting with 694515a05b7dc61a9180fad70467b76cf23626d0387bd7972855b02831b6ede1 not found: ID does not exist" Jan 29 16:41:55 crc kubenswrapper[4685]: I0129 16:41:55.610596 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e069be4a-5111-4620-aada-998486c4df05" path="/var/lib/kubelet/pods/e069be4a-5111-4620-aada-998486c4df05/volumes" Jan 29 16:42:04 crc kubenswrapper[4685]: I0129 16:42:04.331285 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:42:04 crc kubenswrapper[4685]: I0129 16:42:04.332022 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:42:04 crc kubenswrapper[4685]: I0129 16:42:04.332111 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:42:04 crc kubenswrapper[4685]: I0129 16:42:04.332854 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f660a642750ec42bf7c2e5c27a23b363a3576c5f9ac5d6eead16533b53b29349"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:42:04 crc kubenswrapper[4685]: I0129 16:42:04.332932 4685 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://f660a642750ec42bf7c2e5c27a23b363a3576c5f9ac5d6eead16533b53b29349" gracePeriod=600 Jan 29 16:42:05 crc kubenswrapper[4685]: I0129 16:42:05.215459 4685 generic.go:334] "Generic (PLEG): container finished" podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="f660a642750ec42bf7c2e5c27a23b363a3576c5f9ac5d6eead16533b53b29349" exitCode=0 Jan 29 16:42:05 crc kubenswrapper[4685]: I0129 16:42:05.215530 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"f660a642750ec42bf7c2e5c27a23b363a3576c5f9ac5d6eead16533b53b29349"} Jan 29 16:42:05 crc kubenswrapper[4685]: I0129 16:42:05.215904 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"4c171405786cfe69cce6ff6a20974f802a4550e5bd7ac86b69a6669c226672bc"} Jan 29 16:42:05 crc kubenswrapper[4685]: I0129 16:42:05.215932 4685 scope.go:117] "RemoveContainer" containerID="0631ee68681351da7c993ddc9a5145cf505e96a07fb73bc3744fa991be1ba341" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.650446 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6p7fx"] Jan 29 16:42:20 crc kubenswrapper[4685]: E0129 16:42:20.651538 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="extract-content" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.651574 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="extract-content" Jan 29 16:42:20 crc kubenswrapper[4685]: E0129 16:42:20.651601 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="registry-server" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.651614 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="registry-server" Jan 29 16:42:20 crc kubenswrapper[4685]: E0129 16:42:20.651642 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="extract-utilities" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.651654 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="extract-utilities" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.651844 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e069be4a-5111-4620-aada-998486c4df05" containerName="registry-server" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.653570 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.660728 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6p7fx"] Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.787095 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-catalog-content\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.787144 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-utilities\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.787176 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzcjt\" (UniqueName: \"kubernetes.io/projected/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-kube-api-access-rzcjt\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.888227 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-catalog-content\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.888278 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-utilities\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.888717 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-utilities\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.888752 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzcjt\" (UniqueName: \"kubernetes.io/projected/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-kube-api-access-rzcjt\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.888821 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-catalog-content\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.908075 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rzcjt\" (UniqueName: \"kubernetes.io/projected/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-kube-api-access-rzcjt\") pod \"redhat-operators-6p7fx\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:20 crc kubenswrapper[4685]: I0129 16:42:20.974467 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:21 crc kubenswrapper[4685]: I0129 16:42:21.174485 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6p7fx"] Jan 29 16:42:21 crc kubenswrapper[4685]: W0129 16:42:21.179323 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod277a4a27_ec7f_47d3_a98b_ea05b0ac4b2b.slice/crio-862fbbee5ff052d8d37ad1c8727db9190e6802b89c02440cd53205b8de13a649 WatchSource:0}: Error finding container 862fbbee5ff052d8d37ad1c8727db9190e6802b89c02440cd53205b8de13a649: Status 404 returned error can't find the container with id 862fbbee5ff052d8d37ad1c8727db9190e6802b89c02440cd53205b8de13a649 Jan 29 16:42:21 crc kubenswrapper[4685]: I0129 16:42:21.314756 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6p7fx" event={"ID":"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b","Type":"ContainerStarted","Data":"862fbbee5ff052d8d37ad1c8727db9190e6802b89c02440cd53205b8de13a649"} Jan 29 16:42:22 crc kubenswrapper[4685]: I0129 16:42:22.321128 4685 generic.go:334] "Generic (PLEG): container finished" podID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerID="0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2" exitCode=0 Jan 29 16:42:22 crc kubenswrapper[4685]: I0129 16:42:22.321367 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6p7fx" event={"ID":"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b","Type":"ContainerDied","Data":"0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2"} Jan 29 16:42:24 crc kubenswrapper[4685]: I0129 16:42:24.334153 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6p7fx" event={"ID":"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b","Type":"ContainerStarted","Data":"48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2"} Jan 29 16:42:25 crc kubenswrapper[4685]: I0129 16:42:25.343379 4685 generic.go:334] "Generic (PLEG): container finished" podID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerID="48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2" exitCode=0 Jan 29 16:42:25 crc kubenswrapper[4685]: I0129 16:42:25.343873 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6p7fx" event={"ID":"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b","Type":"ContainerDied","Data":"48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2"} Jan 29 16:42:27 crc kubenswrapper[4685]: I0129 16:42:27.366483 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6p7fx" event={"ID":"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b","Type":"ContainerStarted","Data":"50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac"} Jan 29 16:42:27 crc kubenswrapper[4685]: I0129 16:42:27.398566 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6p7fx" podStartSLOduration=3.162255011 podStartE2EDuration="7.398549013s" 
podCreationTimestamp="2026-01-29 16:42:20 +0000 UTC" firstStartedPulling="2026-01-29 16:42:22.323132481 +0000 UTC m=+969.520488743" lastFinishedPulling="2026-01-29 16:42:26.559426483 +0000 UTC m=+973.756782745" observedRunningTime="2026-01-29 16:42:27.394569934 +0000 UTC m=+974.591926236" watchObservedRunningTime="2026-01-29 16:42:27.398549013 +0000 UTC m=+974.595905295" Jan 29 16:42:30 crc kubenswrapper[4685]: I0129 16:42:30.975709 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:30 crc kubenswrapper[4685]: I0129 16:42:30.975766 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:32 crc kubenswrapper[4685]: I0129 16:42:32.030914 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6p7fx" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="registry-server" probeResult="failure" output=< Jan 29 16:42:32 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Jan 29 16:42:32 crc kubenswrapper[4685]: > Jan 29 16:42:41 crc kubenswrapper[4685]: I0129 16:42:41.049086 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:41 crc kubenswrapper[4685]: I0129 16:42:41.111659 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:41 crc kubenswrapper[4685]: I0129 16:42:41.287380 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6p7fx"] Jan 29 16:42:42 crc kubenswrapper[4685]: I0129 16:42:42.458903 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6p7fx" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="registry-server" containerID="cri-o://50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac" gracePeriod=2 Jan 29 16:42:42 crc kubenswrapper[4685]: I0129 16:42:42.839306 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:42 crc kubenswrapper[4685]: I0129 16:42:42.983594 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-catalog-content\") pod \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " Jan 29 16:42:42 crc kubenswrapper[4685]: I0129 16:42:42.983653 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzcjt\" (UniqueName: \"kubernetes.io/projected/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-kube-api-access-rzcjt\") pod \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " Jan 29 16:42:42 crc kubenswrapper[4685]: I0129 16:42:42.983777 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-utilities\") pod \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\" (UID: \"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b\") " Jan 29 16:42:42 crc kubenswrapper[4685]: I0129 16:42:42.985511 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-utilities" (OuterVolumeSpecName: "utilities") pod "277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" (UID: "277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:42:42 crc kubenswrapper[4685]: I0129 16:42:42.989165 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-kube-api-access-rzcjt" (OuterVolumeSpecName: "kube-api-access-rzcjt") pod "277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" (UID: "277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b"). InnerVolumeSpecName "kube-api-access-rzcjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.084882 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzcjt\" (UniqueName: \"kubernetes.io/projected/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-kube-api-access-rzcjt\") on node \"crc\" DevicePath \"\"" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.084929 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.108473 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" (UID: "277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.185999 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.469700 4685 generic.go:334] "Generic (PLEG): container finished" podID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerID="50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac" exitCode=0 Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.469763 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6p7fx" event={"ID":"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b","Type":"ContainerDied","Data":"50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac"} Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.469802 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6p7fx" event={"ID":"277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b","Type":"ContainerDied","Data":"862fbbee5ff052d8d37ad1c8727db9190e6802b89c02440cd53205b8de13a649"} Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.469830 4685 scope.go:117] "RemoveContainer" containerID="50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.469836 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6p7fx" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.503618 4685 scope.go:117] "RemoveContainer" containerID="48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.510283 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6p7fx"] Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.514720 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6p7fx"] Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.529773 4685 scope.go:117] "RemoveContainer" containerID="0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.548284 4685 scope.go:117] "RemoveContainer" containerID="50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac" Jan 29 16:42:43 crc kubenswrapper[4685]: E0129 16:42:43.548680 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac\": container with ID starting with 50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac not found: ID does not exist" containerID="50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.548739 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac"} err="failed to get container status \"50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac\": rpc error: code = NotFound desc = could not find container \"50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac\": container with ID starting with 50aa228af573da0843c14e16dfdb5e104b160f468cf1e0616de609e27f24b7ac not found: ID does not exist" Jan 29 16:42:43 crc 
kubenswrapper[4685]: I0129 16:42:43.548774 4685 scope.go:117] "RemoveContainer" containerID="48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2" Jan 29 16:42:43 crc kubenswrapper[4685]: E0129 16:42:43.549182 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2\": container with ID starting with 48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2 not found: ID does not exist" containerID="48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.549214 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2"} err="failed to get container status \"48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2\": rpc error: code = NotFound desc = could not find container \"48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2\": container with ID starting with 48a95ad670c37e58bd144194501196570806587994552e960d81a6d9f351b5a2 not found: ID does not exist" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.549234 4685 scope.go:117] "RemoveContainer" containerID="0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2" Jan 29 16:42:43 crc kubenswrapper[4685]: E0129 16:42:43.549486 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2\": container with ID starting with 0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2 not found: ID does not exist" containerID="0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.549516 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2"} err="failed to get container status \"0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2\": rpc error: code = NotFound desc = could not find container \"0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2\": container with ID starting with 0051fc393c15b9293f3c7ecb52c6223d3f1041345b173e3a606d8cfedb84b6f2 not found: ID does not exist" Jan 29 16:42:43 crc kubenswrapper[4685]: I0129 16:42:43.607802 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" path="/var/lib/kubelet/pods/277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b/volumes" Jan 29 16:44:04 crc kubenswrapper[4685]: I0129 16:44:04.330371 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:44:04 crc kubenswrapper[4685]: I0129 16:44:04.330974 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:44:34 crc kubenswrapper[4685]: I0129 16:44:34.330565 4685 patch_prober.go:28] interesting 
pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:44:34 crc kubenswrapper[4685]: I0129 16:44:34.331162 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.186185 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp"] Jan 29 16:45:00 crc kubenswrapper[4685]: E0129 16:45:00.187033 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="registry-server" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.187053 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="registry-server" Jan 29 16:45:00 crc kubenswrapper[4685]: E0129 16:45:00.187072 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="extract-utilities" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.187085 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="extract-utilities" Jan 29 16:45:00 crc kubenswrapper[4685]: E0129 16:45:00.187122 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="extract-content" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.187135 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="extract-content" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.187308 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="277a4a27-ec7f-47d3-a98b-ea05b0ac4b2b" containerName="registry-server" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.187919 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.190412 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.191232 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.196899 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp"] Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.303510 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6fjl\" (UniqueName: \"kubernetes.io/projected/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-kube-api-access-f6fjl\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.303562 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-secret-volume\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.303719 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-config-volume\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.405444 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6fjl\" (UniqueName: \"kubernetes.io/projected/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-kube-api-access-f6fjl\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.405864 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-secret-volume\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.406031 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-config-volume\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.407133 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-config-volume\") pod 
\"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.413585 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-secret-volume\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.422353 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6fjl\" (UniqueName: \"kubernetes.io/projected/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-kube-api-access-f6fjl\") pod \"collect-profiles-29495085-vrfcp\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.505258 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:00 crc kubenswrapper[4685]: I0129 16:45:00.920030 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp"] Jan 29 16:45:01 crc kubenswrapper[4685]: I0129 16:45:01.311483 4685 generic.go:334] "Generic (PLEG): container finished" podID="f02f4876-d2d0-43ec-ab3d-45e5048cc12a" containerID="d0786c1d298e3c175fae299849ce4133c9f722f9a88cc85cc540256d0c5c73cd" exitCode=0 Jan 29 16:45:01 crc kubenswrapper[4685]: I0129 16:45:01.311671 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" event={"ID":"f02f4876-d2d0-43ec-ab3d-45e5048cc12a","Type":"ContainerDied","Data":"d0786c1d298e3c175fae299849ce4133c9f722f9a88cc85cc540256d0c5c73cd"} Jan 29 16:45:01 crc kubenswrapper[4685]: I0129 16:45:01.311793 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" event={"ID":"f02f4876-d2d0-43ec-ab3d-45e5048cc12a","Type":"ContainerStarted","Data":"daad3bc73ab846f80e4350af8e3585d0d791a115e80a88fad986675ea675a041"} Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.561177 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.734626 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-secret-volume\") pod \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.734723 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-config-volume\") pod \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.734774 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6fjl\" (UniqueName: \"kubernetes.io/projected/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-kube-api-access-f6fjl\") pod \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\" (UID: \"f02f4876-d2d0-43ec-ab3d-45e5048cc12a\") " Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.737184 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-config-volume" (OuterVolumeSpecName: "config-volume") pod "f02f4876-d2d0-43ec-ab3d-45e5048cc12a" (UID: "f02f4876-d2d0-43ec-ab3d-45e5048cc12a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.744587 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-kube-api-access-f6fjl" (OuterVolumeSpecName: "kube-api-access-f6fjl") pod "f02f4876-d2d0-43ec-ab3d-45e5048cc12a" (UID: "f02f4876-d2d0-43ec-ab3d-45e5048cc12a"). InnerVolumeSpecName "kube-api-access-f6fjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.744926 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f02f4876-d2d0-43ec-ab3d-45e5048cc12a" (UID: "f02f4876-d2d0-43ec-ab3d-45e5048cc12a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.837167 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.837225 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-config-volume\") on node \"crc\" DevicePath \"\"" Jan 29 16:45:02 crc kubenswrapper[4685]: I0129 16:45:02.837244 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6fjl\" (UniqueName: \"kubernetes.io/projected/f02f4876-d2d0-43ec-ab3d-45e5048cc12a-kube-api-access-f6fjl\") on node \"crc\" DevicePath \"\"" Jan 29 16:45:03 crc kubenswrapper[4685]: I0129 16:45:03.330028 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" event={"ID":"f02f4876-d2d0-43ec-ab3d-45e5048cc12a","Type":"ContainerDied","Data":"daad3bc73ab846f80e4350af8e3585d0d791a115e80a88fad986675ea675a041"} Jan 29 16:45:03 crc kubenswrapper[4685]: I0129 16:45:03.330082 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="daad3bc73ab846f80e4350af8e3585d0d791a115e80a88fad986675ea675a041" Jan 29 16:45:03 crc kubenswrapper[4685]: I0129 16:45:03.330060 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495085-vrfcp" Jan 29 16:45:04 crc kubenswrapper[4685]: I0129 16:45:04.331445 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:45:04 crc kubenswrapper[4685]: I0129 16:45:04.331769 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:45:04 crc kubenswrapper[4685]: I0129 16:45:04.331811 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:45:04 crc kubenswrapper[4685]: I0129 16:45:04.332320 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4c171405786cfe69cce6ff6a20974f802a4550e5bd7ac86b69a6669c226672bc"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:45:04 crc kubenswrapper[4685]: I0129 16:45:04.332375 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://4c171405786cfe69cce6ff6a20974f802a4550e5bd7ac86b69a6669c226672bc" gracePeriod=600 Jan 29 16:45:05 crc kubenswrapper[4685]: I0129 16:45:05.347575 4685 generic.go:334] "Generic (PLEG): container finished" 
podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="4c171405786cfe69cce6ff6a20974f802a4550e5bd7ac86b69a6669c226672bc" exitCode=0 Jan 29 16:45:05 crc kubenswrapper[4685]: I0129 16:45:05.347700 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"4c171405786cfe69cce6ff6a20974f802a4550e5bd7ac86b69a6669c226672bc"} Jan 29 16:45:05 crc kubenswrapper[4685]: I0129 16:45:05.348061 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"f68bba25553a4323636f99a598b0dffeca7aad51c31793993d5ed4d16b648a32"} Jan 29 16:45:05 crc kubenswrapper[4685]: I0129 16:45:05.348103 4685 scope.go:117] "RemoveContainer" containerID="f660a642750ec42bf7c2e5c27a23b363a3576c5f9ac5d6eead16533b53b29349" Jan 29 16:47:04 crc kubenswrapper[4685]: I0129 16:47:04.331127 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:47:04 crc kubenswrapper[4685]: I0129 16:47:04.331739 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.178563 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs"] Jan 29 16:47:18 crc kubenswrapper[4685]: E0129 16:47:18.179587 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f02f4876-d2d0-43ec-ab3d-45e5048cc12a" containerName="collect-profiles" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.179618 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f02f4876-d2d0-43ec-ab3d-45e5048cc12a" containerName="collect-profiles" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.179849 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f02f4876-d2d0-43ec-ab3d-45e5048cc12a" containerName="collect-profiles" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.181638 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.189512 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs"] Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.192200 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.231929 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.232193 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddqbr\" (UniqueName: \"kubernetes.io/projected/3bd092a0-bc7a-4763-b041-572e68b0343b-kube-api-access-ddqbr\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.232283 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.332965 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.333236 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddqbr\" (UniqueName: \"kubernetes.io/projected/3bd092a0-bc7a-4763-b041-572e68b0343b-kube-api-access-ddqbr\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.333325 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.333761 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.333834 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.356164 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddqbr\" (UniqueName: \"kubernetes.io/projected/3bd092a0-bc7a-4763-b041-572e68b0343b-kube-api-access-ddqbr\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.504592 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:18 crc kubenswrapper[4685]: I0129 16:47:18.734833 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs"] Jan 29 16:47:19 crc kubenswrapper[4685]: I0129 16:47:19.185523 4685 generic.go:334] "Generic (PLEG): container finished" podID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerID="a18e334194d524f0723f7682bea03675f5a0448edf730e664ca9fc5b88314766" exitCode=0 Jan 29 16:47:19 crc kubenswrapper[4685]: I0129 16:47:19.185576 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" event={"ID":"3bd092a0-bc7a-4763-b041-572e68b0343b","Type":"ContainerDied","Data":"a18e334194d524f0723f7682bea03675f5a0448edf730e664ca9fc5b88314766"} Jan 29 16:47:19 crc kubenswrapper[4685]: I0129 16:47:19.185605 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" event={"ID":"3bd092a0-bc7a-4763-b041-572e68b0343b","Type":"ContainerStarted","Data":"73fad48984bc7329e2642002bdb52d739caba77e5a2517786fed198d31d1316c"} Jan 29 16:47:19 crc kubenswrapper[4685]: I0129 16:47:19.187995 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 29 16:47:21 crc kubenswrapper[4685]: I0129 16:47:21.201181 4685 generic.go:334] "Generic (PLEG): container finished" podID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerID="a1b3844f49f0c3e115bda70d3f9a77b6b59e91235e379a79ffe8c1104d1c3347" exitCode=0 Jan 29 16:47:21 crc kubenswrapper[4685]: I0129 16:47:21.202067 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" event={"ID":"3bd092a0-bc7a-4763-b041-572e68b0343b","Type":"ContainerDied","Data":"a1b3844f49f0c3e115bda70d3f9a77b6b59e91235e379a79ffe8c1104d1c3347"} Jan 29 16:47:22 crc kubenswrapper[4685]: I0129 16:47:22.212037 4685 generic.go:334] "Generic (PLEG): container finished" 
podID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerID="732e53777a0ac15ce7f34dca573dc6a5d789f18b46283361f2282ea1791edbc0" exitCode=0 Jan 29 16:47:22 crc kubenswrapper[4685]: I0129 16:47:22.212187 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" event={"ID":"3bd092a0-bc7a-4763-b041-572e68b0343b","Type":"ContainerDied","Data":"732e53777a0ac15ce7f34dca573dc6a5d789f18b46283361f2282ea1791edbc0"} Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.473240 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.607745 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-bundle\") pod \"3bd092a0-bc7a-4763-b041-572e68b0343b\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.607800 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddqbr\" (UniqueName: \"kubernetes.io/projected/3bd092a0-bc7a-4763-b041-572e68b0343b-kube-api-access-ddqbr\") pod \"3bd092a0-bc7a-4763-b041-572e68b0343b\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.607887 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-util\") pod \"3bd092a0-bc7a-4763-b041-572e68b0343b\" (UID: \"3bd092a0-bc7a-4763-b041-572e68b0343b\") " Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.608765 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-bundle" (OuterVolumeSpecName: "bundle") pod "3bd092a0-bc7a-4763-b041-572e68b0343b" (UID: "3bd092a0-bc7a-4763-b041-572e68b0343b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.613956 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bd092a0-bc7a-4763-b041-572e68b0343b-kube-api-access-ddqbr" (OuterVolumeSpecName: "kube-api-access-ddqbr") pod "3bd092a0-bc7a-4763-b041-572e68b0343b" (UID: "3bd092a0-bc7a-4763-b041-572e68b0343b"). InnerVolumeSpecName "kube-api-access-ddqbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.630687 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-util" (OuterVolumeSpecName: "util") pod "3bd092a0-bc7a-4763-b041-572e68b0343b" (UID: "3bd092a0-bc7a-4763-b041-572e68b0343b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.709437 4685 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-bundle\") on node \"crc\" DevicePath \"\"" Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.709505 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddqbr\" (UniqueName: \"kubernetes.io/projected/3bd092a0-bc7a-4763-b041-572e68b0343b-kube-api-access-ddqbr\") on node \"crc\" DevicePath \"\"" Jan 29 16:47:23 crc kubenswrapper[4685]: I0129 16:47:23.709533 4685 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bd092a0-bc7a-4763-b041-572e68b0343b-util\") on node \"crc\" DevicePath \"\"" Jan 29 16:47:24 crc kubenswrapper[4685]: I0129 16:47:24.225925 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" event={"ID":"3bd092a0-bc7a-4763-b041-572e68b0343b","Type":"ContainerDied","Data":"73fad48984bc7329e2642002bdb52d739caba77e5a2517786fed198d31d1316c"} Jan 29 16:47:24 crc kubenswrapper[4685]: I0129 16:47:24.225971 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73fad48984bc7329e2642002bdb52d739caba77e5a2517786fed198d31d1316c" Jan 29 16:47:24 crc kubenswrapper[4685]: I0129 16:47:24.225985 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.460549 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-r92ld/must-gather-42zl8"] Jan 29 16:47:27 crc kubenswrapper[4685]: E0129 16:47:27.461077 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerName="util" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.461092 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerName="util" Jan 29 16:47:27 crc kubenswrapper[4685]: E0129 16:47:27.461115 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerName="pull" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.461123 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerName="pull" Jan 29 16:47:27 crc kubenswrapper[4685]: E0129 16:47:27.461140 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerName="extract" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.461150 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerName="extract" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.461255 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bd092a0-bc7a-4763-b041-572e68b0343b" containerName="extract" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.462076 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.466041 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-r92ld"/"kube-root-ca.crt" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.466143 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-r92ld"/"openshift-service-ca.crt" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.472896 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-r92ld/must-gather-42zl8"] Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.559933 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/554eefcc-5b6c-4a74-8931-583700f46bb4-must-gather-output\") pod \"must-gather-42zl8\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.560026 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzjbt\" (UniqueName: \"kubernetes.io/projected/554eefcc-5b6c-4a74-8931-583700f46bb4-kube-api-access-zzjbt\") pod \"must-gather-42zl8\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.661662 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzjbt\" (UniqueName: \"kubernetes.io/projected/554eefcc-5b6c-4a74-8931-583700f46bb4-kube-api-access-zzjbt\") pod \"must-gather-42zl8\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.661744 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/554eefcc-5b6c-4a74-8931-583700f46bb4-must-gather-output\") pod \"must-gather-42zl8\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.662151 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/554eefcc-5b6c-4a74-8931-583700f46bb4-must-gather-output\") pod \"must-gather-42zl8\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.680347 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzjbt\" (UniqueName: \"kubernetes.io/projected/554eefcc-5b6c-4a74-8931-583700f46bb4-kube-api-access-zzjbt\") pod \"must-gather-42zl8\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:27 crc kubenswrapper[4685]: I0129 16:47:27.779737 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:47:28 crc kubenswrapper[4685]: I0129 16:47:28.193258 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-r92ld/must-gather-42zl8"] Jan 29 16:47:28 crc kubenswrapper[4685]: W0129 16:47:28.200575 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod554eefcc_5b6c_4a74_8931_583700f46bb4.slice/crio-4204dfa4545425423f660d35e06bcfd82c2196e7e722df513acc9b083a6ea0c8 WatchSource:0}: Error finding container 4204dfa4545425423f660d35e06bcfd82c2196e7e722df513acc9b083a6ea0c8: Status 404 returned error can't find the container with id 4204dfa4545425423f660d35e06bcfd82c2196e7e722df513acc9b083a6ea0c8 Jan 29 16:47:28 crc kubenswrapper[4685]: I0129 16:47:28.253224 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r92ld/must-gather-42zl8" event={"ID":"554eefcc-5b6c-4a74-8931-583700f46bb4","Type":"ContainerStarted","Data":"4204dfa4545425423f660d35e06bcfd82c2196e7e722df513acc9b083a6ea0c8"} Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.220539 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-45gq5"] Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.221760 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.225216 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.225496 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-whc42" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.225694 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.239754 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-45gq5"] Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.397795 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97nxq\" (UniqueName: \"kubernetes.io/projected/1175d136-d53f-4700-b3bb-fe3c844736ec-kube-api-access-97nxq\") pod \"nmstate-operator-646758c888-45gq5\" (UID: \"1175d136-d53f-4700-b3bb-fe3c844736ec\") " pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.499022 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97nxq\" (UniqueName: \"kubernetes.io/projected/1175d136-d53f-4700-b3bb-fe3c844736ec-kube-api-access-97nxq\") pod \"nmstate-operator-646758c888-45gq5\" (UID: \"1175d136-d53f-4700-b3bb-fe3c844736ec\") " pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.517706 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97nxq\" (UniqueName: \"kubernetes.io/projected/1175d136-d53f-4700-b3bb-fe3c844736ec-kube-api-access-97nxq\") pod \"nmstate-operator-646758c888-45gq5\" (UID: \"1175d136-d53f-4700-b3bb-fe3c844736ec\") " pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.543377 4685 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" Jan 29 16:47:30 crc kubenswrapper[4685]: I0129 16:47:30.729217 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-45gq5"] Jan 29 16:47:34 crc kubenswrapper[4685]: I0129 16:47:34.331245 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:47:34 crc kubenswrapper[4685]: I0129 16:47:34.331590 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:47:36 crc kubenswrapper[4685]: I0129 16:47:36.299264 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r92ld/must-gather-42zl8" event={"ID":"554eefcc-5b6c-4a74-8931-583700f46bb4","Type":"ContainerStarted","Data":"a9db02dec571cceef2ed2f7651393a2416738df543be53c1c6b955f9b838778e"} Jan 29 16:47:36 crc kubenswrapper[4685]: I0129 16:47:36.299548 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r92ld/must-gather-42zl8" event={"ID":"554eefcc-5b6c-4a74-8931-583700f46bb4","Type":"ContainerStarted","Data":"96075c8752920f56f992deab6db02e0e7572d2f61bf3083f2738a14d51ad57ca"} Jan 29 16:47:36 crc kubenswrapper[4685]: I0129 16:47:36.300778 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" event={"ID":"1175d136-d53f-4700-b3bb-fe3c844736ec","Type":"ContainerStarted","Data":"209e0443740787fd185464221dd2901974a3fa817eb97dbbdecd6a507484f2fd"} Jan 29 16:47:36 crc kubenswrapper[4685]: I0129 16:47:36.315414 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-r92ld/must-gather-42zl8" podStartSLOduration=2.096136739 podStartE2EDuration="9.31537626s" podCreationTimestamp="2026-01-29 16:47:27 +0000 UTC" firstStartedPulling="2026-01-29 16:47:28.203542296 +0000 UTC m=+1275.400898568" lastFinishedPulling="2026-01-29 16:47:35.422781837 +0000 UTC m=+1282.620138089" observedRunningTime="2026-01-29 16:47:36.313590501 +0000 UTC m=+1283.510946783" watchObservedRunningTime="2026-01-29 16:47:36.31537626 +0000 UTC m=+1283.512732542" Jan 29 16:47:39 crc kubenswrapper[4685]: I0129 16:47:39.318963 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" event={"ID":"1175d136-d53f-4700-b3bb-fe3c844736ec","Type":"ContainerStarted","Data":"43a66029d50b1cdeddfb752ac2933fa60225b5d7261d20ae9810adc907c82663"} Jan 29 16:47:39 crc kubenswrapper[4685]: I0129 16:47:39.345291 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-45gq5" podStartSLOduration=6.370690426 podStartE2EDuration="9.345268221s" podCreationTimestamp="2026-01-29 16:47:30 +0000 UTC" firstStartedPulling="2026-01-29 16:47:35.337806079 +0000 UTC m=+1282.535162361" lastFinishedPulling="2026-01-29 16:47:38.312383884 +0000 UTC m=+1285.509740156" observedRunningTime="2026-01-29 16:47:39.343225946 +0000 UTC m=+1286.540582208" 
watchObservedRunningTime="2026-01-29 16:47:39.345268221 +0000 UTC m=+1286.542624523" Jan 29 16:48:04 crc kubenswrapper[4685]: I0129 16:48:04.330954 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:48:04 crc kubenswrapper[4685]: I0129 16:48:04.331777 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:48:04 crc kubenswrapper[4685]: I0129 16:48:04.331832 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:48:04 crc kubenswrapper[4685]: I0129 16:48:04.332589 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f68bba25553a4323636f99a598b0dffeca7aad51c31793993d5ed4d16b648a32"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:48:04 crc kubenswrapper[4685]: I0129 16:48:04.332645 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://f68bba25553a4323636f99a598b0dffeca7aad51c31793993d5ed4d16b648a32" gracePeriod=600 Jan 29 16:48:05 crc kubenswrapper[4685]: I0129 16:48:05.464143 4685 generic.go:334] "Generic (PLEG): container finished" podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="f68bba25553a4323636f99a598b0dffeca7aad51c31793993d5ed4d16b648a32" exitCode=0 Jan 29 16:48:05 crc kubenswrapper[4685]: I0129 16:48:05.464237 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"f68bba25553a4323636f99a598b0dffeca7aad51c31793993d5ed4d16b648a32"} Jan 29 16:48:05 crc kubenswrapper[4685]: I0129 16:48:05.464586 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerStarted","Data":"aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd"} Jan 29 16:48:05 crc kubenswrapper[4685]: I0129 16:48:05.464615 4685 scope.go:117] "RemoveContainer" containerID="4c171405786cfe69cce6ff6a20974f802a4550e5bd7ac86b69a6669c226672bc" Jan 29 16:48:12 crc kubenswrapper[4685]: I0129 16:48:12.618334 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-9qg72_70b3bb34-0cd2-49ff-9eb0-e392c530fab1/control-plane-machine-set-operator/0.log" Jan 29 16:48:12 crc kubenswrapper[4685]: I0129 16:48:12.748694 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zbcw2_eefb747e-9461-4086-8f22-03f6730c26ed/kube-rbac-proxy/0.log" Jan 29 16:48:12 crc kubenswrapper[4685]: I0129 
16:48:12.786778 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zbcw2_eefb747e-9461-4086-8f22-03f6730c26ed/machine-api-operator/0.log" Jan 29 16:48:24 crc kubenswrapper[4685]: I0129 16:48:24.526672 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-r8gnp_aa58a373-0547-4352-910d-85b7e54e76e4/cert-manager-controller/0.log" Jan 29 16:48:24 crc kubenswrapper[4685]: I0129 16:48:24.644384 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-pm9td_92c08ef0-ae20-4ff1-83d1-4cef334602ef/cert-manager-cainjector/0.log" Jan 29 16:48:24 crc kubenswrapper[4685]: I0129 16:48:24.692353 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-ktj5s_ff37a656-e807-48e5-a595-cc0d97f1133c/cert-manager-webhook/0.log" Jan 29 16:48:35 crc kubenswrapper[4685]: I0129 16:48:35.876192 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-45gq5_1175d136-d53f-4700-b3bb-fe3c844736ec/nmstate-operator/0.log" Jan 29 16:48:59 crc kubenswrapper[4685]: I0129 16:48:59.655349 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs_3bd092a0-bc7a-4763-b041-572e68b0343b/util/0.log" Jan 29 16:48:59 crc kubenswrapper[4685]: I0129 16:48:59.808752 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs_3bd092a0-bc7a-4763-b041-572e68b0343b/pull/0.log" Jan 29 16:48:59 crc kubenswrapper[4685]: I0129 16:48:59.810831 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs_3bd092a0-bc7a-4763-b041-572e68b0343b/util/0.log" Jan 29 16:48:59 crc kubenswrapper[4685]: I0129 16:48:59.812839 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs_3bd092a0-bc7a-4763-b041-572e68b0343b/pull/0.log" Jan 29 16:48:59 crc kubenswrapper[4685]: I0129 16:48:59.962563 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs_3bd092a0-bc7a-4763-b041-572e68b0343b/util/0.log" Jan 29 16:48:59 crc kubenswrapper[4685]: I0129 16:48:59.986571 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs_3bd092a0-bc7a-4763-b041-572e68b0343b/extract/0.log" Jan 29 16:48:59 crc kubenswrapper[4685]: I0129 16:48:59.987852 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v8lrs_3bd092a0-bc7a-4763-b041-572e68b0343b/pull/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.128609 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hhqnq_465498e2-f29b-47b9-a34d-b770b801f075/extract-utilities/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.273524 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hhqnq_465498e2-f29b-47b9-a34d-b770b801f075/extract-utilities/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.275336 4685 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hhqnq_465498e2-f29b-47b9-a34d-b770b801f075/extract-content/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.287265 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hhqnq_465498e2-f29b-47b9-a34d-b770b801f075/extract-content/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.424593 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hhqnq_465498e2-f29b-47b9-a34d-b770b801f075/extract-utilities/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.446242 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hhqnq_465498e2-f29b-47b9-a34d-b770b801f075/extract-content/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.575116 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-hhqnq_465498e2-f29b-47b9-a34d-b770b801f075/registry-server/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.629651 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b97g5_beddcef1-54d6-4150-b62f-710112c84ec6/extract-utilities/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.715338 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b97g5_beddcef1-54d6-4150-b62f-710112c84ec6/extract-utilities/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.723699 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b97g5_beddcef1-54d6-4150-b62f-710112c84ec6/extract-content/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.750221 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b97g5_beddcef1-54d6-4150-b62f-710112c84ec6/extract-content/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.911300 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b97g5_beddcef1-54d6-4150-b62f-710112c84ec6/extract-content/0.log" Jan 29 16:49:00 crc kubenswrapper[4685]: I0129 16:49:00.957098 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b97g5_beddcef1-54d6-4150-b62f-710112c84ec6/extract-utilities/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.083718 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b97g5_beddcef1-54d6-4150-b62f-710112c84ec6/registry-server/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.120343 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-t822z_6004d355-0746-423d-a4e5-fe07d8494da1/marketplace-operator/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.137987 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f5k6j_3c3daf83-3283-4698-b95a-b03aafe35302/extract-utilities/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.331470 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f5k6j_3c3daf83-3283-4698-b95a-b03aafe35302/extract-utilities/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.353061 4685 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f5k6j_3c3daf83-3283-4698-b95a-b03aafe35302/extract-content/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.359066 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f5k6j_3c3daf83-3283-4698-b95a-b03aafe35302/extract-content/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.526307 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f5k6j_3c3daf83-3283-4698-b95a-b03aafe35302/extract-utilities/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.532295 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f5k6j_3c3daf83-3283-4698-b95a-b03aafe35302/extract-content/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.562222 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f5k6j_3c3daf83-3283-4698-b95a-b03aafe35302/registry-server/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.683332 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr74r_99ccb4b9-6fe9-4280-8765-a7c78ee9892a/extract-utilities/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.855838 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr74r_99ccb4b9-6fe9-4280-8765-a7c78ee9892a/extract-content/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.860307 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr74r_99ccb4b9-6fe9-4280-8765-a7c78ee9892a/extract-utilities/0.log" Jan 29 16:49:01 crc kubenswrapper[4685]: I0129 16:49:01.865579 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr74r_99ccb4b9-6fe9-4280-8765-a7c78ee9892a/extract-content/0.log" Jan 29 16:49:02 crc kubenswrapper[4685]: I0129 16:49:02.040312 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr74r_99ccb4b9-6fe9-4280-8765-a7c78ee9892a/extract-utilities/0.log" Jan 29 16:49:02 crc kubenswrapper[4685]: I0129 16:49:02.050293 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr74r_99ccb4b9-6fe9-4280-8765-a7c78ee9892a/extract-content/0.log" Jan 29 16:49:02 crc kubenswrapper[4685]: I0129 16:49:02.249220 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr74r_99ccb4b9-6fe9-4280-8765-a7c78ee9892a/registry-server/0.log" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.075801 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vkzhd"] Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.078166 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.081183 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vkzhd"] Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.146052 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-catalog-content\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.146364 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzmfr\" (UniqueName: \"kubernetes.io/projected/faf23b92-e59d-4141-a999-2479791bec5d-kube-api-access-mzmfr\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.146540 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-utilities\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.247176 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-catalog-content\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.247477 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzmfr\" (UniqueName: \"kubernetes.io/projected/faf23b92-e59d-4141-a999-2479791bec5d-kube-api-access-mzmfr\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.247558 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-utilities\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.247950 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-utilities\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.247961 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-catalog-content\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.266638 4685 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/community-operators-v2mrq"] Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.267950 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.272531 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzmfr\" (UniqueName: \"kubernetes.io/projected/faf23b92-e59d-4141-a999-2479791bec5d-kube-api-access-mzmfr\") pod \"certified-operators-vkzhd\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.286918 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2mrq"] Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.349383 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbz69\" (UniqueName: \"kubernetes.io/projected/1f975b06-1470-46b4-a4c7-bcd29692d1dd-kube-api-access-vbz69\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.349450 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-utilities\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.349479 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-catalog-content\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.411610 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.450756 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbz69\" (UniqueName: \"kubernetes.io/projected/1f975b06-1470-46b4-a4c7-bcd29692d1dd-kube-api-access-vbz69\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.450931 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-utilities\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.451010 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-catalog-content\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.451785 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-catalog-content\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.452301 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-utilities\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.470573 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbz69\" (UniqueName: \"kubernetes.io/projected/1f975b06-1470-46b4-a4c7-bcd29692d1dd-kube-api-access-vbz69\") pod \"community-operators-v2mrq\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.607988 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:45 crc kubenswrapper[4685]: I0129 16:49:45.673006 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vkzhd"] Jan 29 16:49:46 crc kubenswrapper[4685]: I0129 16:49:46.028746 4685 generic.go:334] "Generic (PLEG): container finished" podID="faf23b92-e59d-4141-a999-2479791bec5d" containerID="0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a" exitCode=0 Jan 29 16:49:46 crc kubenswrapper[4685]: I0129 16:49:46.029035 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vkzhd" event={"ID":"faf23b92-e59d-4141-a999-2479791bec5d","Type":"ContainerDied","Data":"0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a"} Jan 29 16:49:46 crc kubenswrapper[4685]: I0129 16:49:46.029062 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vkzhd" event={"ID":"faf23b92-e59d-4141-a999-2479791bec5d","Type":"ContainerStarted","Data":"4359cbaa767031b71ac8e7ceb45878442e228dc45eec2a9a4e6620f761da1a83"} Jan 29 16:49:46 crc kubenswrapper[4685]: I0129 16:49:46.075347 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2mrq"] Jan 29 16:49:46 crc kubenswrapper[4685]: W0129 16:49:46.084798 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f975b06_1470_46b4_a4c7_bcd29692d1dd.slice/crio-b8e606c9f90f91e61686143dc8863ce5eff78758b317d6b1ac192bcafe89ecf4 WatchSource:0}: Error finding container b8e606c9f90f91e61686143dc8863ce5eff78758b317d6b1ac192bcafe89ecf4: Status 404 returned error can't find the container with id b8e606c9f90f91e61686143dc8863ce5eff78758b317d6b1ac192bcafe89ecf4 Jan 29 16:49:47 crc kubenswrapper[4685]: I0129 16:49:47.036006 4685 generic.go:334] "Generic (PLEG): container finished" podID="faf23b92-e59d-4141-a999-2479791bec5d" containerID="a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a" exitCode=0 Jan 29 16:49:47 crc kubenswrapper[4685]: I0129 16:49:47.036042 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vkzhd" event={"ID":"faf23b92-e59d-4141-a999-2479791bec5d","Type":"ContainerDied","Data":"a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a"} Jan 29 16:49:47 crc kubenswrapper[4685]: I0129 16:49:47.037757 4685 generic.go:334] "Generic (PLEG): container finished" podID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerID="f01998d8fd05750f636587957ba6c44e4b53c953fe98433e624d4413c83fb842" exitCode=0 Jan 29 16:49:47 crc kubenswrapper[4685]: I0129 16:49:47.037787 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2mrq" event={"ID":"1f975b06-1470-46b4-a4c7-bcd29692d1dd","Type":"ContainerDied","Data":"f01998d8fd05750f636587957ba6c44e4b53c953fe98433e624d4413c83fb842"} Jan 29 16:49:47 crc kubenswrapper[4685]: I0129 16:49:47.037811 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2mrq" event={"ID":"1f975b06-1470-46b4-a4c7-bcd29692d1dd","Type":"ContainerStarted","Data":"b8e606c9f90f91e61686143dc8863ce5eff78758b317d6b1ac192bcafe89ecf4"} Jan 29 16:49:48 crc kubenswrapper[4685]: I0129 16:49:48.047292 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vkzhd" 
event={"ID":"faf23b92-e59d-4141-a999-2479791bec5d","Type":"ContainerStarted","Data":"af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81"} Jan 29 16:49:48 crc kubenswrapper[4685]: I0129 16:49:48.071678 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vkzhd" podStartSLOduration=1.615512092 podStartE2EDuration="3.07165942s" podCreationTimestamp="2026-01-29 16:49:45 +0000 UTC" firstStartedPulling="2026-01-29 16:49:46.030238584 +0000 UTC m=+1413.227594846" lastFinishedPulling="2026-01-29 16:49:47.486385892 +0000 UTC m=+1414.683742174" observedRunningTime="2026-01-29 16:49:48.070163759 +0000 UTC m=+1415.267520031" watchObservedRunningTime="2026-01-29 16:49:48.07165942 +0000 UTC m=+1415.269015702" Jan 29 16:49:49 crc kubenswrapper[4685]: I0129 16:49:49.053317 4685 generic.go:334] "Generic (PLEG): container finished" podID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerID="83b1ab5c2a316fa700ef56cee01b2131438a71bbd8781c3b46411bf57ace6886" exitCode=0 Jan 29 16:49:49 crc kubenswrapper[4685]: I0129 16:49:49.053381 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2mrq" event={"ID":"1f975b06-1470-46b4-a4c7-bcd29692d1dd","Type":"ContainerDied","Data":"83b1ab5c2a316fa700ef56cee01b2131438a71bbd8781c3b46411bf57ace6886"} Jan 29 16:49:50 crc kubenswrapper[4685]: I0129 16:49:50.063913 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2mrq" event={"ID":"1f975b06-1470-46b4-a4c7-bcd29692d1dd","Type":"ContainerStarted","Data":"2c3551f00480f2ce59fdc8d83f3467d5a76152b59f4d7825cdece9bb2eb36e04"} Jan 29 16:49:50 crc kubenswrapper[4685]: I0129 16:49:50.097023 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v2mrq" podStartSLOduration=2.653167132 podStartE2EDuration="5.096997097s" podCreationTimestamp="2026-01-29 16:49:45 +0000 UTC" firstStartedPulling="2026-01-29 16:49:47.038835151 +0000 UTC m=+1414.236191433" lastFinishedPulling="2026-01-29 16:49:49.482665136 +0000 UTC m=+1416.680021398" observedRunningTime="2026-01-29 16:49:50.091266491 +0000 UTC m=+1417.288622753" watchObservedRunningTime="2026-01-29 16:49:50.096997097 +0000 UTC m=+1417.294353359" Jan 29 16:49:55 crc kubenswrapper[4685]: I0129 16:49:55.413377 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:55 crc kubenswrapper[4685]: I0129 16:49:55.413861 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:55 crc kubenswrapper[4685]: I0129 16:49:55.456213 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:55 crc kubenswrapper[4685]: I0129 16:49:55.611610 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:55 crc kubenswrapper[4685]: I0129 16:49:55.611653 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:55 crc kubenswrapper[4685]: I0129 16:49:55.647568 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:56 crc kubenswrapper[4685]: I0129 16:49:56.150474 4685 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:56 crc kubenswrapper[4685]: I0129 16:49:56.162063 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:49:57 crc kubenswrapper[4685]: I0129 16:49:57.086666 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vkzhd"] Jan 29 16:49:58 crc kubenswrapper[4685]: I0129 16:49:58.134124 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vkzhd" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="registry-server" containerID="cri-o://af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81" gracePeriod=2 Jan 29 16:49:58 crc kubenswrapper[4685]: I0129 16:49:58.488472 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v2mrq"] Jan 29 16:49:58 crc kubenswrapper[4685]: I0129 16:49:58.489021 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v2mrq" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="registry-server" containerID="cri-o://2c3551f00480f2ce59fdc8d83f3467d5a76152b59f4d7825cdece9bb2eb36e04" gracePeriod=2 Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.060174 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.140824 4685 generic.go:334] "Generic (PLEG): container finished" podID="faf23b92-e59d-4141-a999-2479791bec5d" containerID="af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81" exitCode=0 Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.140862 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vkzhd" event={"ID":"faf23b92-e59d-4141-a999-2479791bec5d","Type":"ContainerDied","Data":"af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81"} Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.140886 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vkzhd" event={"ID":"faf23b92-e59d-4141-a999-2479791bec5d","Type":"ContainerDied","Data":"4359cbaa767031b71ac8e7ceb45878442e228dc45eec2a9a4e6620f761da1a83"} Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.140904 4685 scope.go:117] "RemoveContainer" containerID="af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.140914 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vkzhd" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.154979 4685 scope.go:117] "RemoveContainer" containerID="a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.172490 4685 scope.go:117] "RemoveContainer" containerID="0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.193273 4685 scope.go:117] "RemoveContainer" containerID="af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81" Jan 29 16:49:59 crc kubenswrapper[4685]: E0129 16:49:59.193781 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81\": container with ID starting with af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81 not found: ID does not exist" containerID="af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.193834 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81"} err="failed to get container status \"af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81\": rpc error: code = NotFound desc = could not find container \"af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81\": container with ID starting with af9c0a0cbc25c6f6a66830512f4a3a2343a90872b4b6f17ab32310c3e0579f81 not found: ID does not exist" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.193874 4685 scope.go:117] "RemoveContainer" containerID="a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a" Jan 29 16:49:59 crc kubenswrapper[4685]: E0129 16:49:59.194264 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a\": container with ID starting with a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a not found: ID does not exist" containerID="a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.194292 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a"} err="failed to get container status \"a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a\": rpc error: code = NotFound desc = could not find container \"a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a\": container with ID starting with a09aace7d63fbb849430a3f86b03a440fe7d526b44c48eb300bc5631e276513a not found: ID does not exist" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.194314 4685 scope.go:117] "RemoveContainer" containerID="0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a" Jan 29 16:49:59 crc kubenswrapper[4685]: E0129 16:49:59.194685 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a\": container with ID starting with 0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a not found: ID does not exist" containerID="0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a" 
Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.194744 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a"} err="failed to get container status \"0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a\": rpc error: code = NotFound desc = could not find container \"0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a\": container with ID starting with 0bb4dc960d6457e6a4442a821a642ed6e9f6165d0f74c1635cd6f10730e9a77a not found: ID does not exist" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.235223 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-utilities\") pod \"faf23b92-e59d-4141-a999-2479791bec5d\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.235283 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-catalog-content\") pod \"faf23b92-e59d-4141-a999-2479791bec5d\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.235411 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzmfr\" (UniqueName: \"kubernetes.io/projected/faf23b92-e59d-4141-a999-2479791bec5d-kube-api-access-mzmfr\") pod \"faf23b92-e59d-4141-a999-2479791bec5d\" (UID: \"faf23b92-e59d-4141-a999-2479791bec5d\") " Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.236015 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-utilities" (OuterVolumeSpecName: "utilities") pod "faf23b92-e59d-4141-a999-2479791bec5d" (UID: "faf23b92-e59d-4141-a999-2479791bec5d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.246178 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faf23b92-e59d-4141-a999-2479791bec5d-kube-api-access-mzmfr" (OuterVolumeSpecName: "kube-api-access-mzmfr") pod "faf23b92-e59d-4141-a999-2479791bec5d" (UID: "faf23b92-e59d-4141-a999-2479791bec5d"). InnerVolumeSpecName "kube-api-access-mzmfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.302679 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "faf23b92-e59d-4141-a999-2479791bec5d" (UID: "faf23b92-e59d-4141-a999-2479791bec5d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.337363 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.337438 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faf23b92-e59d-4141-a999-2479791bec5d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.337556 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzmfr\" (UniqueName: \"kubernetes.io/projected/faf23b92-e59d-4141-a999-2479791bec5d-kube-api-access-mzmfr\") on node \"crc\" DevicePath \"\"" Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.487493 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vkzhd"] Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.494058 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vkzhd"] Jan 29 16:49:59 crc kubenswrapper[4685]: I0129 16:49:59.608842 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faf23b92-e59d-4141-a999-2479791bec5d" path="/var/lib/kubelet/pods/faf23b92-e59d-4141-a999-2479791bec5d/volumes" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.147203 4685 generic.go:334] "Generic (PLEG): container finished" podID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerID="2c3551f00480f2ce59fdc8d83f3467d5a76152b59f4d7825cdece9bb2eb36e04" exitCode=0 Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.147290 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2mrq" event={"ID":"1f975b06-1470-46b4-a4c7-bcd29692d1dd","Type":"ContainerDied","Data":"2c3551f00480f2ce59fdc8d83f3467d5a76152b59f4d7825cdece9bb2eb36e04"} Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.147325 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2mrq" event={"ID":"1f975b06-1470-46b4-a4c7-bcd29692d1dd","Type":"ContainerDied","Data":"b8e606c9f90f91e61686143dc8863ce5eff78758b317d6b1ac192bcafe89ecf4"} Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.147340 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8e606c9f90f91e61686143dc8863ce5eff78758b317d6b1ac192bcafe89ecf4" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.156015 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.350032 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbz69\" (UniqueName: \"kubernetes.io/projected/1f975b06-1470-46b4-a4c7-bcd29692d1dd-kube-api-access-vbz69\") pod \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.350102 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-utilities\") pod \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.350171 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-catalog-content\") pod \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\" (UID: \"1f975b06-1470-46b4-a4c7-bcd29692d1dd\") " Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.353891 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-utilities" (OuterVolumeSpecName: "utilities") pod "1f975b06-1470-46b4-a4c7-bcd29692d1dd" (UID: "1f975b06-1470-46b4-a4c7-bcd29692d1dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.356357 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f975b06-1470-46b4-a4c7-bcd29692d1dd-kube-api-access-vbz69" (OuterVolumeSpecName: "kube-api-access-vbz69") pod "1f975b06-1470-46b4-a4c7-bcd29692d1dd" (UID: "1f975b06-1470-46b4-a4c7-bcd29692d1dd"). InnerVolumeSpecName "kube-api-access-vbz69". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.424668 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f975b06-1470-46b4-a4c7-bcd29692d1dd" (UID: "1f975b06-1470-46b4-a4c7-bcd29692d1dd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.452058 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbz69\" (UniqueName: \"kubernetes.io/projected/1f975b06-1470-46b4-a4c7-bcd29692d1dd-kube-api-access-vbz69\") on node \"crc\" DevicePath \"\"" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.452109 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:50:00 crc kubenswrapper[4685]: I0129 16:50:00.452124 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f975b06-1470-46b4-a4c7-bcd29692d1dd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:50:01 crc kubenswrapper[4685]: I0129 16:50:01.154569 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v2mrq" Jan 29 16:50:01 crc kubenswrapper[4685]: I0129 16:50:01.210465 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v2mrq"] Jan 29 16:50:01 crc kubenswrapper[4685]: I0129 16:50:01.218271 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v2mrq"] Jan 29 16:50:01 crc kubenswrapper[4685]: I0129 16:50:01.618012 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" path="/var/lib/kubelet/pods/1f975b06-1470-46b4-a4c7-bcd29692d1dd/volumes" Jan 29 16:50:04 crc kubenswrapper[4685]: I0129 16:50:04.330695 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:50:04 crc kubenswrapper[4685]: I0129 16:50:04.331128 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:50:06 crc kubenswrapper[4685]: I0129 16:50:06.187043 4685 generic.go:334] "Generic (PLEG): container finished" podID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerID="96075c8752920f56f992deab6db02e0e7572d2f61bf3083f2738a14d51ad57ca" exitCode=0 Jan 29 16:50:06 crc kubenswrapper[4685]: I0129 16:50:06.187173 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-r92ld/must-gather-42zl8" event={"ID":"554eefcc-5b6c-4a74-8931-583700f46bb4","Type":"ContainerDied","Data":"96075c8752920f56f992deab6db02e0e7572d2f61bf3083f2738a14d51ad57ca"} Jan 29 16:50:06 crc kubenswrapper[4685]: I0129 16:50:06.187825 4685 scope.go:117] "RemoveContainer" containerID="96075c8752920f56f992deab6db02e0e7572d2f61bf3083f2738a14d51ad57ca" Jan 29 16:50:06 crc kubenswrapper[4685]: I0129 16:50:06.465561 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-r92ld_must-gather-42zl8_554eefcc-5b6c-4a74-8931-583700f46bb4/gather/0.log" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.045858 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-r92ld/must-gather-42zl8"] Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.046611 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-r92ld/must-gather-42zl8" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerName="copy" containerID="cri-o://a9db02dec571cceef2ed2f7651393a2416738df543be53c1c6b955f9b838778e" gracePeriod=2 Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.051472 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-r92ld/must-gather-42zl8"] Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.232008 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-r92ld_must-gather-42zl8_554eefcc-5b6c-4a74-8931-583700f46bb4/copy/0.log" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.232605 4685 generic.go:334] "Generic (PLEG): container finished" podID="554eefcc-5b6c-4a74-8931-583700f46bb4" 
containerID="a9db02dec571cceef2ed2f7651393a2416738df543be53c1c6b955f9b838778e" exitCode=143 Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.405595 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-r92ld_must-gather-42zl8_554eefcc-5b6c-4a74-8931-583700f46bb4/copy/0.log" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.406401 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.526895 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/554eefcc-5b6c-4a74-8931-583700f46bb4-must-gather-output\") pod \"554eefcc-5b6c-4a74-8931-583700f46bb4\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.526950 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzjbt\" (UniqueName: \"kubernetes.io/projected/554eefcc-5b6c-4a74-8931-583700f46bb4-kube-api-access-zzjbt\") pod \"554eefcc-5b6c-4a74-8931-583700f46bb4\" (UID: \"554eefcc-5b6c-4a74-8931-583700f46bb4\") " Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.533094 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/554eefcc-5b6c-4a74-8931-583700f46bb4-kube-api-access-zzjbt" (OuterVolumeSpecName: "kube-api-access-zzjbt") pod "554eefcc-5b6c-4a74-8931-583700f46bb4" (UID: "554eefcc-5b6c-4a74-8931-583700f46bb4"). InnerVolumeSpecName "kube-api-access-zzjbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.575077 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/554eefcc-5b6c-4a74-8931-583700f46bb4-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "554eefcc-5b6c-4a74-8931-583700f46bb4" (UID: "554eefcc-5b6c-4a74-8931-583700f46bb4"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.608914 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" path="/var/lib/kubelet/pods/554eefcc-5b6c-4a74-8931-583700f46bb4/volumes" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.628600 4685 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/554eefcc-5b6c-4a74-8931-583700f46bb4-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 29 16:50:13 crc kubenswrapper[4685]: I0129 16:50:13.628623 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzjbt\" (UniqueName: \"kubernetes.io/projected/554eefcc-5b6c-4a74-8931-583700f46bb4-kube-api-access-zzjbt\") on node \"crc\" DevicePath \"\"" Jan 29 16:50:14 crc kubenswrapper[4685]: I0129 16:50:14.238973 4685 scope.go:117] "RemoveContainer" containerID="a9db02dec571cceef2ed2f7651393a2416738df543be53c1c6b955f9b838778e" Jan 29 16:50:14 crc kubenswrapper[4685]: I0129 16:50:14.238992 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-r92ld/must-gather-42zl8" Jan 29 16:50:14 crc kubenswrapper[4685]: I0129 16:50:14.256905 4685 scope.go:117] "RemoveContainer" containerID="96075c8752920f56f992deab6db02e0e7572d2f61bf3083f2738a14d51ad57ca" Jan 29 16:50:34 crc kubenswrapper[4685]: I0129 16:50:34.330946 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:50:34 crc kubenswrapper[4685]: I0129 16:50:34.331585 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.330564 4685 patch_prober.go:28] interesting pod/machine-config-daemon-kbcwf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.331263 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.331560 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.332376 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd"} pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.332489 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerName="machine-config-daemon" containerID="cri-o://aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" gracePeriod=600 Jan 29 16:51:04 crc kubenswrapper[4685]: E0129 16:51:04.464523 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.525224 4685 generic.go:334] "Generic (PLEG): container finished" podID="6ceb996f-40db-4a5d-915d-dd0f6d926751" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" exitCode=0 Jan 29 16:51:04 crc 
kubenswrapper[4685]: I0129 16:51:04.525301 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" event={"ID":"6ceb996f-40db-4a5d-915d-dd0f6d926751","Type":"ContainerDied","Data":"aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd"} Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.525571 4685 scope.go:117] "RemoveContainer" containerID="f68bba25553a4323636f99a598b0dffeca7aad51c31793993d5ed4d16b648a32" Jan 29 16:51:04 crc kubenswrapper[4685]: I0129 16:51:04.526019 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:51:04 crc kubenswrapper[4685]: E0129 16:51:04.526218 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:51:15 crc kubenswrapper[4685]: I0129 16:51:15.601431 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:51:15 crc kubenswrapper[4685]: E0129 16:51:15.602349 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:51:30 crc kubenswrapper[4685]: I0129 16:51:30.602128 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:51:30 crc kubenswrapper[4685]: E0129 16:51:30.602676 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:51:45 crc kubenswrapper[4685]: I0129 16:51:45.601479 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:51:45 crc kubenswrapper[4685]: E0129 16:51:45.602297 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.889614 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nfcmh"] Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890278 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerName="gather" Jan 29 
16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890295 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerName="gather" Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890307 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="extract-content" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890315 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="extract-content" Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890327 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="registry-server" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890336 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="registry-server" Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890353 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="extract-utilities" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890362 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="extract-utilities" Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890373 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="extract-content" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890381 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="extract-content" Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890415 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="registry-server" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890422 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="registry-server" Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890440 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="extract-utilities" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890448 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="extract-utilities" Jan 29 16:51:53 crc kubenswrapper[4685]: E0129 16:51:53.890463 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerName="copy" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890471 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerName="copy" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890587 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerName="gather" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890605 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="faf23b92-e59d-4141-a999-2479791bec5d" containerName="registry-server" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.890617 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="554eefcc-5b6c-4a74-8931-583700f46bb4" containerName="copy" Jan 29 16:51:53 crc 
kubenswrapper[4685]: I0129 16:51:53.890626 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f975b06-1470-46b4-a4c7-bcd29692d1dd" containerName="registry-server" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.891528 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.908575 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfcmh"] Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.915683 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-catalog-content\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.915755 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-utilities\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:53 crc kubenswrapper[4685]: I0129 16:51:53.915792 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv69m\" (UniqueName: \"kubernetes.io/projected/9836595a-8276-425c-8773-d14a23123a1d-kube-api-access-lv69m\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.016899 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-catalog-content\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.017054 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-utilities\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.017504 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-catalog-content\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.017620 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-utilities\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.017702 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv69m\" (UniqueName: 
\"kubernetes.io/projected/9836595a-8276-425c-8773-d14a23123a1d-kube-api-access-lv69m\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.043982 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv69m\" (UniqueName: \"kubernetes.io/projected/9836595a-8276-425c-8773-d14a23123a1d-kube-api-access-lv69m\") pod \"redhat-marketplace-nfcmh\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.210606 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.605910 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfcmh"] Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.821993 4685 generic.go:334] "Generic (PLEG): container finished" podID="9836595a-8276-425c-8773-d14a23123a1d" containerID="45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49" exitCode=0 Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.822046 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfcmh" event={"ID":"9836595a-8276-425c-8773-d14a23123a1d","Type":"ContainerDied","Data":"45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49"} Jan 29 16:51:54 crc kubenswrapper[4685]: I0129 16:51:54.822339 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfcmh" event={"ID":"9836595a-8276-425c-8773-d14a23123a1d","Type":"ContainerStarted","Data":"6078b9447cc7150e2620bf7c37d5aca3bd457a0d9b4fc3c209b28c1837e08df0"} Jan 29 16:51:55 crc kubenswrapper[4685]: I0129 16:51:55.838567 4685 generic.go:334] "Generic (PLEG): container finished" podID="9836595a-8276-425c-8773-d14a23123a1d" containerID="df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601" exitCode=0 Jan 29 16:51:55 crc kubenswrapper[4685]: I0129 16:51:55.838731 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfcmh" event={"ID":"9836595a-8276-425c-8773-d14a23123a1d","Type":"ContainerDied","Data":"df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601"} Jan 29 16:51:56 crc kubenswrapper[4685]: I0129 16:51:56.846923 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfcmh" event={"ID":"9836595a-8276-425c-8773-d14a23123a1d","Type":"ContainerStarted","Data":"e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821"} Jan 29 16:51:56 crc kubenswrapper[4685]: I0129 16:51:56.865599 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nfcmh" podStartSLOduration=2.46166378 podStartE2EDuration="3.865577473s" podCreationTimestamp="2026-01-29 16:51:53 +0000 UTC" firstStartedPulling="2026-01-29 16:51:54.824461839 +0000 UTC m=+1542.021818101" lastFinishedPulling="2026-01-29 16:51:56.228375532 +0000 UTC m=+1543.425731794" observedRunningTime="2026-01-29 16:51:56.863334991 +0000 UTC m=+1544.060691283" watchObservedRunningTime="2026-01-29 16:51:56.865577473 +0000 UTC m=+1544.062933735" Jan 29 16:51:59 crc kubenswrapper[4685]: I0129 16:51:59.602451 4685 scope.go:117] "RemoveContainer" 
containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:51:59 crc kubenswrapper[4685]: E0129 16:51:59.603321 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:52:04 crc kubenswrapper[4685]: I0129 16:52:04.211128 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:52:04 crc kubenswrapper[4685]: I0129 16:52:04.211690 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:52:04 crc kubenswrapper[4685]: I0129 16:52:04.274861 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:52:04 crc kubenswrapper[4685]: I0129 16:52:04.963421 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:52:05 crc kubenswrapper[4685]: I0129 16:52:05.010208 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfcmh"] Jan 29 16:52:06 crc kubenswrapper[4685]: I0129 16:52:06.905051 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nfcmh" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="registry-server" containerID="cri-o://e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821" gracePeriod=2 Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.267241 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.295965 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-catalog-content\") pod \"9836595a-8276-425c-8773-d14a23123a1d\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.296018 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lv69m\" (UniqueName: \"kubernetes.io/projected/9836595a-8276-425c-8773-d14a23123a1d-kube-api-access-lv69m\") pod \"9836595a-8276-425c-8773-d14a23123a1d\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.296680 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-utilities\") pod \"9836595a-8276-425c-8773-d14a23123a1d\" (UID: \"9836595a-8276-425c-8773-d14a23123a1d\") " Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.296737 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-utilities" (OuterVolumeSpecName: "utilities") pod "9836595a-8276-425c-8773-d14a23123a1d" (UID: "9836595a-8276-425c-8773-d14a23123a1d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.297063 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.301026 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9836595a-8276-425c-8773-d14a23123a1d-kube-api-access-lv69m" (OuterVolumeSpecName: "kube-api-access-lv69m") pod "9836595a-8276-425c-8773-d14a23123a1d" (UID: "9836595a-8276-425c-8773-d14a23123a1d"). InnerVolumeSpecName "kube-api-access-lv69m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.319275 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9836595a-8276-425c-8773-d14a23123a1d" (UID: "9836595a-8276-425c-8773-d14a23123a1d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.398451 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9836595a-8276-425c-8773-d14a23123a1d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.398550 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lv69m\" (UniqueName: \"kubernetes.io/projected/9836595a-8276-425c-8773-d14a23123a1d-kube-api-access-lv69m\") on node \"crc\" DevicePath \"\"" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.913065 4685 generic.go:334] "Generic (PLEG): container finished" podID="9836595a-8276-425c-8773-d14a23123a1d" containerID="e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821" exitCode=0 Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.913111 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfcmh" event={"ID":"9836595a-8276-425c-8773-d14a23123a1d","Type":"ContainerDied","Data":"e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821"} Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.913142 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfcmh" event={"ID":"9836595a-8276-425c-8773-d14a23123a1d","Type":"ContainerDied","Data":"6078b9447cc7150e2620bf7c37d5aca3bd457a0d9b4fc3c209b28c1837e08df0"} Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.913163 4685 scope.go:117] "RemoveContainer" containerID="e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.913310 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfcmh" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.929897 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfcmh"] Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.933651 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfcmh"] Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.946007 4685 scope.go:117] "RemoveContainer" containerID="df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.959525 4685 scope.go:117] "RemoveContainer" containerID="45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.981386 4685 scope.go:117] "RemoveContainer" containerID="e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821" Jan 29 16:52:07 crc kubenswrapper[4685]: E0129 16:52:07.981827 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821\": container with ID starting with e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821 not found: ID does not exist" containerID="e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.981865 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821"} err="failed to get container status \"e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821\": rpc error: code = NotFound desc = could not find container \"e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821\": container with ID starting with e9445d2867812e8a8f7624bb015687d856c4c03506ceda502468895e40921821 not found: ID does not exist" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.981887 4685 scope.go:117] "RemoveContainer" containerID="df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601" Jan 29 16:52:07 crc kubenswrapper[4685]: E0129 16:52:07.982256 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601\": container with ID starting with df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601 not found: ID does not exist" containerID="df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.982287 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601"} err="failed to get container status \"df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601\": rpc error: code = NotFound desc = could not find container \"df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601\": container with ID starting with df0510346cd79ded98c7d0596357302db5fee595fe2bb7c3ad12bfa5b23a9601 not found: ID does not exist" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.982301 4685 scope.go:117] "RemoveContainer" containerID="45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49" Jan 29 16:52:07 crc kubenswrapper[4685]: E0129 16:52:07.982583 4685 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49\": container with ID starting with 45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49 not found: ID does not exist" containerID="45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49" Jan 29 16:52:07 crc kubenswrapper[4685]: I0129 16:52:07.982602 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49"} err="failed to get container status \"45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49\": rpc error: code = NotFound desc = could not find container \"45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49\": container with ID starting with 45a3119d330f85b44d1aabceb4dbb744c4ace8c72c0baec7f78995661be6af49 not found: ID does not exist" Jan 29 16:52:09 crc kubenswrapper[4685]: I0129 16:52:09.608756 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9836595a-8276-425c-8773-d14a23123a1d" path="/var/lib/kubelet/pods/9836595a-8276-425c-8773-d14a23123a1d/volumes" Jan 29 16:52:14 crc kubenswrapper[4685]: I0129 16:52:14.601761 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:52:14 crc kubenswrapper[4685]: E0129 16:52:14.602708 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:52:26 crc kubenswrapper[4685]: I0129 16:52:26.601856 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:52:26 crc kubenswrapper[4685]: E0129 16:52:26.602573 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:52:41 crc kubenswrapper[4685]: I0129 16:52:41.602052 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:52:41 crc kubenswrapper[4685]: E0129 16:52:41.602712 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:52:52 crc kubenswrapper[4685]: I0129 16:52:52.601806 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:52:52 crc kubenswrapper[4685]: E0129 16:52:52.602495 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.161192 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sx8ft"] Jan 29 16:52:56 crc kubenswrapper[4685]: E0129 16:52:56.161725 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="extract-utilities" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.161738 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="extract-utilities" Jan 29 16:52:56 crc kubenswrapper[4685]: E0129 16:52:56.161747 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="extract-content" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.161753 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="extract-content" Jan 29 16:52:56 crc kubenswrapper[4685]: E0129 16:52:56.161766 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="registry-server" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.161772 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="registry-server" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.161863 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9836595a-8276-425c-8773-d14a23123a1d" containerName="registry-server" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.162594 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.169710 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sx8ft"] Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.363586 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4fhd\" (UniqueName: \"kubernetes.io/projected/346c1c98-5258-4426-9abf-da32042cbeb4-kube-api-access-f4fhd\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.363933 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-catalog-content\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.364035 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-utilities\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.465119 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4fhd\" (UniqueName: \"kubernetes.io/projected/346c1c98-5258-4426-9abf-da32042cbeb4-kube-api-access-f4fhd\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.465189 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-utilities\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.465208 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-catalog-content\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.465788 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-utilities\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.465798 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-catalog-content\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.483735 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f4fhd\" (UniqueName: \"kubernetes.io/projected/346c1c98-5258-4426-9abf-da32042cbeb4-kube-api-access-f4fhd\") pod \"redhat-operators-sx8ft\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.783685 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:52:56 crc kubenswrapper[4685]: I0129 16:52:56.980378 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sx8ft"] Jan 29 16:52:57 crc kubenswrapper[4685]: I0129 16:52:57.159899 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sx8ft" event={"ID":"346c1c98-5258-4426-9abf-da32042cbeb4","Type":"ContainerStarted","Data":"b07be7b4d72a1567caaa3d71d35962013ac1f8dae01fd49bfa9e49743ec61d8e"} Jan 29 16:52:58 crc kubenswrapper[4685]: I0129 16:52:58.169492 4685 generic.go:334] "Generic (PLEG): container finished" podID="346c1c98-5258-4426-9abf-da32042cbeb4" containerID="3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9" exitCode=0 Jan 29 16:52:58 crc kubenswrapper[4685]: I0129 16:52:58.169560 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sx8ft" event={"ID":"346c1c98-5258-4426-9abf-da32042cbeb4","Type":"ContainerDied","Data":"3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9"} Jan 29 16:52:58 crc kubenswrapper[4685]: I0129 16:52:58.172701 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 29 16:53:00 crc kubenswrapper[4685]: I0129 16:53:00.186431 4685 generic.go:334] "Generic (PLEG): container finished" podID="346c1c98-5258-4426-9abf-da32042cbeb4" containerID="3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92" exitCode=0 Jan 29 16:53:00 crc kubenswrapper[4685]: I0129 16:53:00.186525 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sx8ft" event={"ID":"346c1c98-5258-4426-9abf-da32042cbeb4","Type":"ContainerDied","Data":"3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92"} Jan 29 16:53:01 crc kubenswrapper[4685]: I0129 16:53:01.198663 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sx8ft" event={"ID":"346c1c98-5258-4426-9abf-da32042cbeb4","Type":"ContainerStarted","Data":"4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc"} Jan 29 16:53:01 crc kubenswrapper[4685]: I0129 16:53:01.219348 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sx8ft" podStartSLOduration=2.676778632 podStartE2EDuration="5.219328901s" podCreationTimestamp="2026-01-29 16:52:56 +0000 UTC" firstStartedPulling="2026-01-29 16:52:58.17215159 +0000 UTC m=+1605.369507892" lastFinishedPulling="2026-01-29 16:53:00.714701899 +0000 UTC m=+1607.912058161" observedRunningTime="2026-01-29 16:53:01.214835158 +0000 UTC m=+1608.412191450" watchObservedRunningTime="2026-01-29 16:53:01.219328901 +0000 UTC m=+1608.416685173" Jan 29 16:53:06 crc kubenswrapper[4685]: I0129 16:53:06.784299 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:53:06 crc kubenswrapper[4685]: I0129 16:53:06.784597 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:53:07 crc kubenswrapper[4685]: I0129 16:53:07.602502 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:53:07 crc kubenswrapper[4685]: E0129 16:53:07.602954 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:53:07 crc kubenswrapper[4685]: I0129 16:53:07.824844 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sx8ft" podUID="346c1c98-5258-4426-9abf-da32042cbeb4" containerName="registry-server" probeResult="failure" output=< Jan 29 16:53:07 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Jan 29 16:53:07 crc kubenswrapper[4685]: > Jan 29 16:53:16 crc kubenswrapper[4685]: I0129 16:53:16.828239 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:53:16 crc kubenswrapper[4685]: I0129 16:53:16.882110 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:53:17 crc kubenswrapper[4685]: I0129 16:53:17.062045 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sx8ft"] Jan 29 16:53:18 crc kubenswrapper[4685]: I0129 16:53:18.303197 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sx8ft" podUID="346c1c98-5258-4426-9abf-da32042cbeb4" containerName="registry-server" containerID="cri-o://4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc" gracePeriod=2 Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.206323 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.310693 4685 generic.go:334] "Generic (PLEG): container finished" podID="346c1c98-5258-4426-9abf-da32042cbeb4" containerID="4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc" exitCode=0 Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.310733 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sx8ft" event={"ID":"346c1c98-5258-4426-9abf-da32042cbeb4","Type":"ContainerDied","Data":"4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc"} Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.310775 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sx8ft" event={"ID":"346c1c98-5258-4426-9abf-da32042cbeb4","Type":"ContainerDied","Data":"b07be7b4d72a1567caaa3d71d35962013ac1f8dae01fd49bfa9e49743ec61d8e"} Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.310778 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sx8ft" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.310793 4685 scope.go:117] "RemoveContainer" containerID="4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.328550 4685 scope.go:117] "RemoveContainer" containerID="3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.343244 4685 scope.go:117] "RemoveContainer" containerID="3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.353987 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-catalog-content\") pod \"346c1c98-5258-4426-9abf-da32042cbeb4\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.354193 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4fhd\" (UniqueName: \"kubernetes.io/projected/346c1c98-5258-4426-9abf-da32042cbeb4-kube-api-access-f4fhd\") pod \"346c1c98-5258-4426-9abf-da32042cbeb4\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.354234 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-utilities\") pod \"346c1c98-5258-4426-9abf-da32042cbeb4\" (UID: \"346c1c98-5258-4426-9abf-da32042cbeb4\") " Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.355380 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-utilities" (OuterVolumeSpecName: "utilities") pod "346c1c98-5258-4426-9abf-da32042cbeb4" (UID: "346c1c98-5258-4426-9abf-da32042cbeb4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.359926 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346c1c98-5258-4426-9abf-da32042cbeb4-kube-api-access-f4fhd" (OuterVolumeSpecName: "kube-api-access-f4fhd") pod "346c1c98-5258-4426-9abf-da32042cbeb4" (UID: "346c1c98-5258-4426-9abf-da32042cbeb4"). InnerVolumeSpecName "kube-api-access-f4fhd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.367503 4685 scope.go:117] "RemoveContainer" containerID="4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc" Jan 29 16:53:19 crc kubenswrapper[4685]: E0129 16:53:19.368013 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc\": container with ID starting with 4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc not found: ID does not exist" containerID="4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.368044 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc"} err="failed to get container status \"4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc\": rpc error: code = NotFound desc = could not find container \"4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc\": container with ID starting with 4ff85d28947329d09d662532f075d740ac0b95318788a98f992520097ab914bc not found: ID does not exist" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.368065 4685 scope.go:117] "RemoveContainer" containerID="3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92" Jan 29 16:53:19 crc kubenswrapper[4685]: E0129 16:53:19.368859 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92\": container with ID starting with 3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92 not found: ID does not exist" containerID="3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.368899 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92"} err="failed to get container status \"3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92\": rpc error: code = NotFound desc = could not find container \"3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92\": container with ID starting with 3b43d46093cb421a76550ddf94ec0c1f2ed13da81f0097baeb47f4cd6dee8f92 not found: ID does not exist" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.368911 4685 scope.go:117] "RemoveContainer" containerID="3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9" Jan 29 16:53:19 crc kubenswrapper[4685]: E0129 16:53:19.371318 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9\": container with ID starting with 3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9 not found: ID does not exist" containerID="3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.371354 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9"} err="failed to get container status \"3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9\": rpc error: code = NotFound desc = could not 
find container \"3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9\": container with ID starting with 3903a9302e772aea48628a3119295e925ac1e963ce9d378b39fe26025a51fbb9 not found: ID does not exist" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.455329 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4fhd\" (UniqueName: \"kubernetes.io/projected/346c1c98-5258-4426-9abf-da32042cbeb4-kube-api-access-f4fhd\") on node \"crc\" DevicePath \"\"" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.455551 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-utilities\") on node \"crc\" DevicePath \"\"" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.463951 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "346c1c98-5258-4426-9abf-da32042cbeb4" (UID: "346c1c98-5258-4426-9abf-da32042cbeb4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.557222 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346c1c98-5258-4426-9abf-da32042cbeb4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.645962 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sx8ft"] Jan 29 16:53:19 crc kubenswrapper[4685]: I0129 16:53:19.654617 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sx8ft"] Jan 29 16:53:21 crc kubenswrapper[4685]: I0129 16:53:21.613673 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="346c1c98-5258-4426-9abf-da32042cbeb4" path="/var/lib/kubelet/pods/346c1c98-5258-4426-9abf-da32042cbeb4/volumes" Jan 29 16:53:22 crc kubenswrapper[4685]: I0129 16:53:22.602448 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:53:22 crc kubenswrapper[4685]: E0129 16:53:22.603926 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" Jan 29 16:53:37 crc kubenswrapper[4685]: I0129 16:53:37.601505 4685 scope.go:117] "RemoveContainer" containerID="aaf3b58bbd5e0d381ef651afc2492d769746fa359a1e6e06414d45b76f33fdcd" Jan 29 16:53:37 crc kubenswrapper[4685]: E0129 16:53:37.602512 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbcwf_openshift-machine-config-operator(6ceb996f-40db-4a5d-915d-dd0f6d926751)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbcwf" podUID="6ceb996f-40db-4a5d-915d-dd0f6d926751" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515136710245024451 0ustar coreroot  
Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015136710246017367 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015136704532016513 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015136704533015464 5ustar corecore